addtestdata: adds images to the correct folder, but in the wrong format

This commit is contained in:
Sam
2021-02-15 13:59:15 +00:00
parent 95dc064ed2
commit e2b6f02b37
4 changed files with 27 additions and 36 deletions

View File

@@ -21,9 +21,11 @@ DATABASES = {
}, },
} }
MEDIA_ROOT = BASE_DIR + '/../media/'
MEDIA_URL = '/media/' MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR + '/../media/'
GEOIP_PATH = BASE_DIR + '/../datasets/' GEOIP_PATH = BASE_DIR + '/../datasets/'
# MEDIA_ROOT = os.path.join(BASE_DIR, '/../media/')
# GEOIP_PATH = os.path.join(BASE_DIR, '/../datasets/')
# JWT SETTINGS # JWT SETTINGS
SIMPLE_JWT = { SIMPLE_JWT = {

View File

@@ -1,6 +1,7 @@
import logging import logging
import json import json
import shutil import shutil
from io import BytesIO
import requests import requests
@@ -8,6 +9,7 @@ from django.core.files import File
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon from django.contrib.gis.geos import GEOSGeometry, MultiPolygon
from django.conf import settings from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from faker import Faker from faker import Faker
from PIL import Image from PIL import Image
@@ -25,6 +27,7 @@ logging.basicConfig(
level=logging.INFO, level=logging.INFO,
) )
class Command(BaseCommand): class Command(BaseCommand):
logo_url = "https://picsum.photos/300/200" logo_url = "https://picsum.photos/300/200"
@@ -61,44 +64,32 @@ class Command(BaseCommand):
logging.info(f"Creating Products for {company.company_name}") logging.info(f"Creating Products for {company.company_name}")
# for i in range(100): # for i in range(100):
for i in range(10): for i in range(10):
# make up data
name = fake.last_name_nonbinary() name = fake.last_name_nonbinary()
description = fake.paragraph(nb_sentences=5) description = fake.paragraph(nb_sentences=5)
# TODO: apply tags from tag list # TODO: apply automatic tags from tag list
image_path = settings.MEDIA_ROOT + company.company_name + '.jpg'
# TODO: write image to S3 storage # TODO: write image to S3 storage
response = requests.get(self.logo_url, stream=True) # create instance
# import ipdb; ipdb.set_trace()
# write image to disk
'''
if response.status_code == 200:
with open(image_path, 'wb') as f:
for chunk in response:
import ipdb; ipdb.set_trace()
# f.write(chunk))
else:
logging.warning(f"Got {response.status_code} querying {self.logo_url}")
'''
if response.status_code == 200:
with open(image_path, 'wb') as f:
response.raw.decode_content = True
shutil.copyfileobj(response.raw, f)
image = response.raw.read()
else:
logging.warning(f"Got {response.status_code} querying {self.logo_url}")
continue
image = Image.open(image_path)
# import ipdb; ipdb.set_trace()
product = ProductFactory(name=name, description=description) product = ProductFactory(name=name, description=description)
product.image.save(
image_path, # get image
# image, response = requests.get(self.logo_url, stream=True)
File(open(image_path, 'rb')), response.raw.decode_content = True
save=True) # image=Image.open(image_path)) image = Image.open(response.raw)
# read image from memory
img_io = BytesIO()
image.save(img_io, format='JPEG')
product.image = InMemoryUploadedFile(
BytesIO(),
field_name=None,
name=f"{company.company_name}-{name}.jpg",
content_type='image/jpeg',
size=img_io.tell,
charset=None
)
product.save() product.save()
logging.debug(f"New Product {product.name} created") logging.debug(f"New Product {product.name} created")
print("*", end = '.') print("*", end = '.')
print('') print('')

View File

@@ -485,7 +485,6 @@ class ProductSearchTest(TestCase):
# check for filters # check for filters
self.assertNotEquals([], payload['filters']['singles']) self.assertNotEquals([], payload['filters']['singles'])
self.assertTrue(len(payload['filters']) >= 2 ) self.assertTrue(len(payload['filters']) >= 2 )
import ipdb; ipdb.set_trace()
class MyProductsViewTest(APITestCase): class MyProductsViewTest(APITestCase):

View File

@@ -15,7 +15,6 @@ def extract_search_filters(result_set):
'singles': set(), 'singles': set(),
} }
for item in result_set: for item in result_set:
# import ipdb; ipdb.set_trace()
try: try:
# extract tags # extract tags
tags = item.tags.all() tags = item.tags.all()