-
Notifications
You must be signed in to change notification settings - Fork 0
/
apperal.py
144 lines (112 loc) · 4.96 KB
/
apperal.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
# -*- coding: utf-8 -*-
"""Apperal.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1SfP1nwZP3W8EOysRpXfYotu886lSCU1i
"""
import requests
from bs4 import BeautifulSoup
import json
from google.colab.patches import cv2_imshow
import cv2
import numpy as np
# Fetch one page of sneaker listings from the Constructor.io browse API,
# then preview each shoe: print its name and lowest price, and download
# and display its product image.
results_per_page = 100
raw_data = requests.get(f"https://ac.cnstrc.com/browse/group_id/sneakers?c=ciojs-client-2.29.12&key=key_XT7bjdbvjgECO5d8&i=e5c33ecc-dc20-4868-88ed-1a1709a8e95a&s=1&page=1&num_results_per_page={results_per_page}&fmt_options%5Bhidden_fields%5D=gp_lowest_price_cents_3&fmt_options%5Bhidden_fields%5D=gp_instant_ship_lowest_price_cents_3&fmt_options%5Bhidden_facets%5D=gp_lowest_price_cents_3&fmt_options%5Bhidden_facets%5D=gp_instant_ship_lowest_price_cents_3&_dt=1669306196954")
# The endpoint returns JSON; parse it directly. (The original also ran the
# payload through BeautifulSoup/html5lib into an unused variable — removed.)
apperal_sneaker_data = json.loads(raw_data.text)
for sneaker in apperal_sneaker_data['response']['results']:
    # '\33[1m' is the ANSI bold escape so the shoe name stands out.
    # Prices come back in cents; divide by 100 for dollars.
    print('\33[1m'+ sneaker['value'] + ", $" + str(sneaker['data']['lowest_price_cents'] / 100))
    shoe_images = requests.get(sneaker['data']['image_url'])
    # Each image overwrites the same scratch file before being displayed.
    with open('shoes.png', 'wb') as f:
        f.write(shoe_images.content)
    shoes_PNG = cv2.imread('shoes.png', 1)  # 1 = load as color (BGR)
    cv2_imshow(shoes_PNG)
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D, BatchNormalization
import pickle

# Side length (pixels) every image is resized to before training.
IMG_SIZE = 50

# Build the training set: one [grayscale image, price-in-dollars] pair
# per sneaker in the previously fetched API response.
training_data = []
for sneaker in apperal_sneaker_data['response']['results']:
    price = sneaker['data']['lowest_price_cents'] / 100
    shoe_images = requests.get(sneaker['data']['image_url'])
    # Decode the downloaded bytes in memory instead of round-tripping
    # through a file on disk (the original wrote/reread 'shoes.png').
    img_array = cv2.imdecode(np.frombuffer(shoe_images.content, dtype=np.uint8),
                             cv2.IMREAD_GRAYSCALE)
    if img_array is None:
        # cv2 returns None for undecodable payloads; skip rather than
        # crash on the subsequent resize.
        continue
    new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
    training_data.append([new_array, price])
# BUG FIX: the original `og_training_data = training_data` only aliased
# the same list, so random.shuffle() reordered the "original" copy too.
# list(...) makes a genuine shallow copy that survives the shuffle.
og_training_data = list(training_data)
import random
random.shuffle(training_data)

# Split the shuffled pairs into feature and label arrays.
X = []
y = []
for features, labels in training_data:
    X.append(features)
    y.append(labels)

# (-1, IMG_SIZE, IMG_SIZE, 1): one grayscale channel per image.
X = np.array(X).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
y = np.array(y)

# Scale targets to [0, 1] (prices divided by the max price) and pixel
# values to [0, 1] for training stability.
max_price = y.max()
y = y/max_price
X = X/255.0
# CNN price-regression model: two conv/pool stages followed by a dense
# head emitting a single value in [0, 1] (targets were scaled by max_price).
model = Sequential()
model.add(Conv2D(64, (3, 3), input_shape=X.shape[1:]))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Dense(1))
# Sigmoid keeps the output inside [0, 1], matching the scaled targets.
model.add(Activation('sigmoid'))
# BUG FIX: binary_crossentropy is a classification loss; predicting a
# continuous (scaled) price is regression, so train with mean squared error.
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(X, y, batch_size=32, epochs=250, validation_split=0.1)
# Predict on the training set and un-scale each output back to dollars.
preds = model.predict(X)
prediction_values = []
for prediction in preds:
    for price in prediction:
        prediction_values.append(price * max_price)

# Print "actual | predicted" and the percent error per shoe, then the
# mean absolute percent error. The error expression is computed once
# per shoe (the original repeated it three times).
average = []
for (features, actual), predicted in zip(training_data, prediction_values):
    pct_error = (abs(actual - predicted) / actual) * 100
    print(str(actual) + ' | ' + str(predicted))
    print(f"Error: {pct_error}%")
    average.append(pct_error)
print(sum(average)/len(average))
#testing
# Fetch a larger page of listings to evaluate the model on.
# NOTE(review): page 1 with 150 results overlaps the 100 training
# listings, so this "test" set largely repeats the training data —
# a proper held-out set would request a different page.
results_per_page = 150
raw_data = requests.get(f"https://ac.cnstrc.com/browse/group_id/sneakers?c=ciojs-client-2.29.12&key=key_XT7bjdbvjgECO5d8&i=e5c33ecc-dc20-4868-88ed-1a1709a8e95a&s=1&page=1&num_results_per_page={results_per_page}&fmt_options%5Bhidden_fields%5D=gp_lowest_price_cents_3&fmt_options%5Bhidden_fields%5D=gp_instant_ship_lowest_price_cents_3&fmt_options%5Bhidden_facets%5D=gp_lowest_price_cents_3&fmt_options%5Bhidden_facets%5D=gp_instant_ship_lowest_price_cents_3&_dt=1669306196954")
apperal_sneaker_data = json.loads(raw_data.text)

# Build [grayscale image, price] pairs exactly as for the training set.
testing_data = []
for sneaker in apperal_sneaker_data['response']['results']:
    price = sneaker['data']['lowest_price_cents'] / 100
    shoe_images = requests.get(sneaker['data']['image_url'])
    # Decode in memory; skip images cv2 cannot decode (returns None).
    img_array = cv2.imdecode(np.frombuffer(shoe_images.content, dtype=np.uint8),
                             cv2.IMREAD_GRAYSCALE)
    if img_array is None:
        continue
    new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
    testing_data.append([new_array, price])
# (The original's no-op self-assignment `testing_data = testing_data`
# has been removed.)
import random
random.shuffle(testing_data)

# Split the shuffled test pairs into feature and label arrays.
testingX = []
testingy = []
for features, labels in testing_data:
    testingX.append(features)
    testingy.append(labels)

testingX = np.array(testingX).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
testingy = np.array(testingy)

# NOTE: this rebinds max_price to the TEST-set maximum; the evaluation
# below un-scales predictions with it.
max_price = testingy.max()
testingy = testingy/max_price
testingX = testingX/255.0

# BUG FIX: the original called model.predict(testingX) and discarded the
# result, so the evaluation that follows silently reused the stale
# training-set predictions. Capture the test predictions in `preds`.
preds = model.predict(testingX)
# Un-scale each test prediction back to dollars.
prediction_values = []
for prediction in preds:
    for price in prediction:
        prediction_values.append(price * max_price)

# BUG FIX: the original evaluation indexed training_data for the actual
# prices while comparing against test predictions (and even mixed both
# lists inside one expression). Actual prices must come from testing_data.
average = []
for (features, actual), predicted in zip(testing_data, prediction_values):
    pct_error = (abs(actual - predicted) / actual) * 100
    print(str(actual) + ' | ' + str(predicted))
    print(f"Error: {pct_error}%")
    average.append(pct_error)
print()
print(f"Average Error: {sum(average)/len(average)}")