From bd8d05bd394d05099dde78cc0b0680a6af988e7e Mon Sep 17 00:00:00 2001 From: Sam Date: Fri, 12 Feb 2021 12:34:08 +0000 Subject: [PATCH 01/14] changes to readme, and addtaxonomy --- README.md | 20 ++++++++++++++------ core/management/commands/addtaxonomy.py | 8 +++++--- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index f03b15a..9ff4e0e 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,10 @@ This README aims to document functionality of backend as well as required steps ## Table of Contents - [First Steps](#first-steps) -- [Location Data](#location-data) + - [Load location data](#load-location-data) + - [Load taxonomy data](#load-taxonomy-data) - [Endpoints](#endpoints) -- [Data Load](#data-load) +- [Massive Data Load Endpoints](#massive-data-load-endpoints) - [GeoIP Setup](#geoip-setup) - [Development Utils](#development-utils) @@ -35,13 +36,20 @@ python manage.py migrate - Start server in development mode: `python manage.py runserver` -## Location data + +### Load Location Data To load initial location data use: `python manage.py loadgisdata` -## Endpoints +### Load Taxonomy Data +This data serves as initial Tags + +To load initial set of tags: `python manage.py addtaxonomy` + + +## Endpoints ### User Management @@ -146,7 +154,7 @@ Endpoint url: `/api/v1/stats/` logs about user interaction with products links -### Geo location +## Geo location Location ednpoints: @@ -156,7 +164,7 @@ Location ednpoints: - `/api/v1/cities/` -## Load Data +## Massive Data Data Endpoints ### COOP and Managing User Data Load diff --git a/core/management/commands/addtaxonomy.py b/core/management/commands/addtaxonomy.py index d1d84fe..c58a1d3 100644 --- a/core/management/commands/addtaxonomy.py +++ b/core/management/commands/addtaxonomy.py @@ -4,11 +4,12 @@ from django.core.management.base import BaseCommand from django.conf import settings from core.models import TreeTag +from products.models import Product class Command(BaseCommand): - help = 
'Load taxonomy terms into Tags' + help = 'Load taxonomy terms into Product.tags' def handle(self, *args, **kwargs): @@ -22,11 +23,12 @@ class Command(BaseCommand): print(f"Reading from {settings.TAXONOMY_FILE}") for line in data_file.readlines(): try: - tag = TreeTag.objects.create(name=line) + # tag = TreeTag.objects.create(name=line) + tag = Product.tags.tag_model.objects.create(name=line) counter += 1 print('.', end='') logging.debug(f"{tag} created from {line}") except Exception as e: logging.error(f"{type(e)} while creating tags from {settings.TAXONOMY_FILE}") - print(f"\n{counter} new TreeTag instances created") + print(f"\nAdded {counter} Tag objects to Product.tags") print('Shutting down\n') From 682a41e82864f081678cf0cfbb04bb9825c059d2 Mon Sep 17 00:00:00 2001 From: Sam Date: Fri, 12 Feb 2021 12:35:57 +0000 Subject: [PATCH 02/14] leftover file deleted --- core/migrations/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 core/migrations/__init__.py diff --git a/core/migrations/__init__.py b/core/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 From 798c960eadf6854e21da2fbb3671a2fdd1ef159e Mon Sep 17 00:00:00 2001 From: Sam Date: Fri, 12 Feb 2021 13:44:25 +0000 Subject: [PATCH 03/14] removed mytreetag model --- core/management/commands/addtaxonomy.py | 1 - products/models.py | 6 ------ 2 files changed, 7 deletions(-) diff --git a/core/management/commands/addtaxonomy.py b/core/management/commands/addtaxonomy.py index c58a1d3..569c4ac 100644 --- a/core/management/commands/addtaxonomy.py +++ b/core/management/commands/addtaxonomy.py @@ -23,7 +23,6 @@ class Command(BaseCommand): print(f"Reading from {settings.TAXONOMY_FILE}") for line in data_file.readlines(): try: - # tag = TreeTag.objects.create(name=line) tag = Product.tags.tag_model.objects.create(name=line) counter += 1 print('.', end='') diff --git a/products/models.py b/products/models.py index d3468dc..f6f7bd8 100644 --- a/products/models.py +++ 
b/products/models.py @@ -6,12 +6,6 @@ from companies.models import Company # Create your models here. -class MyTreeTags(TagTreeModel): - class TagMeta: - initial = "colors/blue, colors/red, colors/green" - force_lowercase = True - # autocomplete_view = 'myapp.views.hobbies_autocomplete' - class Product(models.Model): From eb174f027af424bc32813b56dd7884b88e7e877a Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 10:07:38 +0000 Subject: [PATCH 04/14] switched tag field to local TagTreeModel --- companies/models.py | 2 ++ core/management/commands/loadgisdata.py | 4 ++++ core/models.py | 3 ++- products/models.py | 5 +++-- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/companies/models.py b/companies/models.py index 1f44bf0..8b8a470 100644 --- a/companies/models.py +++ b/companies/models.py @@ -2,6 +2,8 @@ from django.contrib.gis.db import models from tagulous.models import TagField +# from core.models import TreeTag + # Create your models here. diff --git a/core/management/commands/loadgisdata.py b/core/management/commands/loadgisdata.py index efdd312..372d991 100644 --- a/core/management/commands/loadgisdata.py +++ b/core/management/commands/loadgisdata.py @@ -131,3 +131,7 @@ class Command(BaseCommand): logging.info(f"Region instances created: {region_counter}") logging.info(f"Province instances created: {province_counter}") logging.info(f"City instances created: {city_counter}") + print(f"Country instances created: {country_counter}") + print(f"Region instances created: {region_counter}") + print(f"Province instances created: {province_counter}") + print(f"City instances created: {city_counter}") diff --git a/core/models.py b/core/models.py index 2fccfe7..5e42300 100644 --- a/core/models.py +++ b/core/models.py @@ -76,6 +76,7 @@ class CustomUser(AbstractBaseUser, PermissionsMixin): class TreeTag(TagTreeModel): class TagMeta: - # initial = "food/eating, food/cooking, gaming/football" + initial = "" force_lowercase = True + max_count=20 # 
autocomplete_view = 'myapp.views.hobbies_autocomplete' diff --git a/products/models.py b/products/models.py index f6f7bd8..14cc0a9 100644 --- a/products/models.py +++ b/products/models.py @@ -2,6 +2,7 @@ from django.contrib.gis.db import models from tagulous.models import SingleTagField, TagField, TagTreeModel +from core.models import TreeTag from companies.models import Company # Create your models here. @@ -33,9 +34,9 @@ class Product(models.Model): update_date = models.DateTimeField('Fecha de actualización de producto', null=True, blank=True) discount = models.DecimalField('Descuento', max_digits=5, decimal_places=2, null=True, blank=True) stock = models.PositiveIntegerField('Stock', null=True) - tags = TagField(force_lowercase=True, max_count=20, tree=True) + tags = TagField(to=TreeTag) category = SingleTagField(null=True) # main tag category - attributes = TagField(force_lowercase=True, max_count=20, tree=True) + attributes = TagField(to=TreeTag, related_name='product_attributes') identifiers = models.TextField('Identificador único de producto', null=True, blank=True) # internal From ef0f36bff56dd65120ad7fa6513195845190145b Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 10:28:29 +0000 Subject: [PATCH 05/14] added initial support for nested tags in product search --- products/tests.py | 2 +- products/views.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/products/tests.py b/products/tests.py index 46497f3..1fec611 100644 --- a/products/tests.py +++ b/products/tests.py @@ -462,7 +462,7 @@ class ProductSearchTest(TestCase): def test_anon_user_can_search(self): expected_instances = [ self.factory(description="zapatos verdes"), - self.factory(tags="rojos"), + self.factory(tags="colores/rojos"), ] unexpected_instances = [ self.factory(description="chanclas"), diff --git a/products/views.py b/products/views.py index 45d765a..6ed03d3 100644 --- a/products/views.py +++ b/products/views.py @@ -161,7 +161,8 @@ def 
product_search(request): result_set.add(item) # search in tags - products = Product.objects.filter(tags=chunk) + tags = Product.tags.tag_model.objects.filter(name__icontains=chunk) + products = Product.objects.filter(tags__in=tags) for item in products: result_set.add(item) # search in category @@ -169,13 +170,16 @@ def product_search(request): for item in products: result_set.add(item) # search in attributes - products = Product.objects.filter(attributes=chunk) + attributes = Product.attributes.tag_model.objects.filter(name__icontains=chunk) + products = Product.objects.filter(attributes__in=attributes) for item in products: result_set.add(item) # extract filters from result_set filters = extract_search_filters(result_set) + # filters = {} # serialize and respond product_serializer = ProductSerializer(result_set, many=True) return Response(data={"filters": filters, "products": product_serializer.data}) except Exception as e: + import ipdb; ipdb.set_trace() return Response({"errors": {"details": str(type(e))}}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) From c38e9dd8228d64115895bf7fe1c49145ffc90e2b Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 11:07:22 +0000 Subject: [PATCH 06/14] unified the db queries in product search, one per chunk --- products/tests.py | 7 +++++-- products/views.py | 28 ++++++++++++---------------- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/products/tests.py b/products/tests.py index 1fec611..a21ac34 100644 --- a/products/tests.py +++ b/products/tests.py @@ -462,7 +462,9 @@ class ProductSearchTest(TestCase): def test_anon_user_can_search(self): expected_instances = [ self.factory(description="zapatos verdes"), - self.factory(tags="colores/rojos"), + self.factory(tags="colores/rojos, "), + self.factory(description="zapatos rojos"), + self.factory(attributes='"lunares rojos", '), ] unexpected_instances = [ self.factory(description="chanclas"), @@ -476,10 +478,11 @@ class ProductSearchTest(TestCase): url = 
f"{self.endpoint}?query_string={query_string}" # send in request response = self.client.get(url) + payload = response.json() # check response self.assertEqual(response.status_code, 200) # check for object creation - self.assertEquals(len(response.data['products']), len(expected_instances)) + self.assertEquals(len(payload['products']), len(expected_instances)) class MyProductsViewTest(APITestCase): diff --git a/products/views.py b/products/views.py index 6ed03d3..f9f356b 100644 --- a/products/views.py +++ b/products/views.py @@ -155,31 +155,27 @@ def product_search(request): chunks = query_string.split(' ') for chunk in chunks: - # search inside name and description - products = Product.objects.filter(Q(name__icontains=chunk) | Q(description__icontains=chunk)) - for item in products: - result_set.add(item) - # search in tags tags = Product.tags.tag_model.objects.filter(name__icontains=chunk) - products = Product.objects.filter(tags__in=tags) - for item in products: - result_set.add(item) # search in category - products = Product.objects.filter(category=chunk) - for item in products: - result_set.add(item) + categories = Product.category.tag_model.objects.filter(name__icontains=chunk) # search in attributes attributes = Product.attributes.tag_model.objects.filter(name__icontains=chunk) - products = Product.objects.filter(attributes__in=attributes) - for item in products: - result_set.add(item) + # unified tag search + products_qs = Product.objects.filter( + Q(name__icontains=chunk)| + Q(description__icontains=chunk)| + Q(tags__in=tags)| + Q(category__in=categories)| + Q(attributes__in=attributes) + ) + for instance in products_qs: + result_set.add(instance) + # extract filters from result_set filters = extract_search_filters(result_set) - # filters = {} # serialize and respond product_serializer = ProductSerializer(result_set, many=True) return Response(data={"filters": filters, "products": product_serializer.data}) except Exception as e: - import ipdb; 
ipdb.set_trace() return Response({"errors": {"details": str(type(e))}}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) From 10ac56c5e984a5f01a1742c1826d38d6c8bf323b Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 11:59:24 +0000 Subject: [PATCH 07/14] improvemnets to filters provided by product search results --- products/tests.py | 4 ++-- products/utils.py | 53 ++++++++++++++++++++++++++++++++++++++++++----- 2 files changed, 50 insertions(+), 7 deletions(-) diff --git a/products/tests.py b/products/tests.py index a21ac34..bfd73d5 100644 --- a/products/tests.py +++ b/products/tests.py @@ -471,8 +471,6 @@ class ProductSearchTest(TestCase): self.factory(tags="azules"), ] - self.factory(tags="azul") - query_string = quote("zapatos rojos") url = f"{self.endpoint}?query_string={query_string}" @@ -483,6 +481,8 @@ class ProductSearchTest(TestCase): self.assertEqual(response.status_code, 200) # check for object creation self.assertEquals(len(payload['products']), len(expected_instances)) + # check for filters + self.assertNotEquals([], payload['filters']['singles']) class MyProductsViewTest(APITestCase): diff --git a/products/utils.py b/products/utils.py index 3f5eeb1..13b04aa 100644 --- a/products/utils.py +++ b/products/utils.py @@ -1,9 +1,52 @@ +import logging + def extract_search_filters(result_set): - filters = set() + """ + Returned object should look something like: + { + "singles": [], # non tree tags + "entry_1": [ 'tag1', 'tag2' ], + "entry_2": [ 'tag1', 'tag2' ], + } + """ + filter_dict = { + 'singles': set(), + } for item in result_set: - tags = item.tags.all() - for tag in tags: - filters.add(tag.name) - return list(filters) + # import ipdb; ipdb.set_trace() + try: + # extract tags + tags = item.tags.all() + for tag in tags: + if len(tag.name.split('/')) == 1: + filter_dict['singles'].add(tag.name) + else: + # set penultimate tag as header + chunks = tag.name.split('/') + header = chunks[-2] + name = chunks[-1] + # check if + entry = 
filter_dict.get(header) + if entry is None: + filter_dict[header] = set() + filter_dict[header].add(name) + # extract attributes + attributes = item.attributes.all() + for tag in attributes: + if len(tag.name.split('/')) == 1: + filter_dict['singles'].add(tag.name) + else: + # set penultimate tag as header + chunks = tag.name.split('/') + header = chunks[-2] + name = chunks[-1] + # check if + entry = filter_dict.get(header) + if entry is None: + filter_dict[header] = set() + filter_dict[header].add(name) + except Exception as e: + logging.error(f'Extacting filters for {item}') + return filter_dict From a84ddcaeed560e6cc4a07aedd02dad5de38fc760 Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 12:09:01 +0000 Subject: [PATCH 08/14] readme update --- README.md | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 9ff4e0e..17f9de5 100644 --- a/README.md +++ b/README.md @@ -8,9 +8,14 @@ This README aims to document functionality of backend as well as required steps - [Load location data](#load-location-data) - [Load taxonomy data](#load-taxonomy-data) - [Endpoints](#endpoints) +- [Product Search](#product-search) - [Massive Data Load Endpoints](#massive-data-load-endpoints) + - [COOP and Managing User Data Load](#coop-and-managing-user-data-load) + - [Product Data Load](#product-data-load) - [GeoIP Setup](#geoip-setup) - [Development Utils](#development-utils) + - [Fake product data generation](#fake-product-data-generation) + ## First Steps @@ -154,7 +159,7 @@ Endpoint url: `/api/v1/stats/` logs about user interaction with products links -## Geo location +### Locations Location ednpoints: @@ -164,8 +169,30 @@ Location ednpoints: - `/api/v1/cities/` -## Massive Data Data Endpoints +## Product Search +Endpoint: `/api/v1/product_search/` + +Query parameters: + + - `query_string`: text from the search input box + + +Response format: + +```json +{ + "filters": { + "singles": ["tag1", "tag2"], 
// for tags that aren't nested + "entry_1": ["subtag 1", "subtag 2"], // for tree tags + "entry_2": ["subtag 1", "subtag 2"] // one per penultimate tag in tree + }, + "products" : [], // list of serialized instances, in order of relevancy +} + +``` + +## Massive Data Load Endpoints ### COOP and Managing User Data Load @@ -186,7 +213,6 @@ CSV headers: `id,nombre-producto,descripcion,imagen,url,precio,gastos-envio,cond Only admin users have access to endoint - ## GeoIP Setup Module: `geoip2` @@ -204,7 +230,7 @@ Optional: ## Development Utils -### Fake product load +### Fake product data generation To create a dataset of fake companies and products: From 7c6db6976ca925319f17887161923d753d178fbd Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 12:17:54 +0000 Subject: [PATCH 09/14] improvements to product search test --- products/tests.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/products/tests.py b/products/tests.py index bfd73d5..0c79054 100644 --- a/products/tests.py +++ b/products/tests.py @@ -461,10 +461,11 @@ class ProductSearchTest(TestCase): def test_anon_user_can_search(self): expected_instances = [ - self.factory(description="zapatos verdes"), - self.factory(tags="colores/rojos, "), - self.factory(description="zapatos rojos"), - self.factory(attributes='"lunares rojos", '), + self.factory(tags="lunares/blancos",description="zapatos verdes"), + self.factory(tags="colores/rojos, tono/brillante"), + self.factory(tags="lunares/azules", description="zapatos rojos"), + self.factory(tags="lunares/rojos", description="zapatos"), + self.factory(attributes='"zapatos de campo", tono/oscuro'), ] unexpected_instances = [ self.factory(description="chanclas"), @@ -483,6 +484,8 @@ class ProductSearchTest(TestCase): self.assertEquals(len(payload['products']), len(expected_instances)) # check for filters self.assertNotEquals([], payload['filters']['singles']) + self.assertTrue(len(payload['filters']) >= 2 ) + import ipdb; 
ipdb.set_trace() class MyProductsViewTest(APITestCase): From 95dc064ed259682bf5031ee79b3be9d2c7a0709e Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 13:27:42 +0000 Subject: [PATCH 10/14] fix for image im addtestdata --- core/management/commands/addtestdata.py | 45 ++++++++++++++++++++----- 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/core/management/commands/addtestdata.py b/core/management/commands/addtestdata.py index c4ad2c0..fbccecc 100644 --- a/core/management/commands/addtestdata.py +++ b/core/management/commands/addtestdata.py @@ -1,12 +1,16 @@ import logging import json +import shutil import requests +from django.core.files import File from django.core.management.base import BaseCommand from django.contrib.gis.geos import GEOSGeometry, MultiPolygon +from django.conf import settings from faker import Faker +from PIL import Image from companies.factories import CompanyFactory from companies.models import Company @@ -23,7 +27,7 @@ logging.basicConfig( class Command(BaseCommand): - logo_url = "https://picsum.photos/200/300" + logo_url = "https://picsum.photos/300/200" help = 'Creates fake companies and related products in database' def handle(self, *args, **kwargs): @@ -55,23 +59,46 @@ class Command(BaseCommand): for company in new_companies: print("Creating fake products for {company.company_name}") logging.info(f"Creating Products for {company.company_name}") - for i in range(100): + # for i in range(100): + for i in range(10): name = fake.last_name_nonbinary() description = fake.paragraph(nb_sentences=5) # TODO: apply tags from tag list + image_path = settings.MEDIA_ROOT + company.company_name + '.jpg' - image= None - """ # TODO: write image to S3 storage - response = requests.get(self.logo_url) + response = requests.get(self.logo_url, stream=True) + # import ipdb; ipdb.set_trace() + # write image to disk + ''' if response.status_code == 200: - response.raw.decode_content = True - image = response.raw.read() + with 
open(image_path, 'wb') as f: + for chunk in response: + import ipdb; ipdb.set_trace() + # f.write(chunk)) else: logging.warning(f"Got {response.status_code} querying {self.logo_url}") - """ + ''' - product = ProductFactory(name=name, description=description, image=image) + if response.status_code == 200: + with open(image_path, 'wb') as f: + response.raw.decode_content = True + shutil.copyfileobj(response.raw, f) + image = response.raw.read() + else: + logging.warning(f"Got {response.status_code} querying {self.logo_url}") + continue + + image = Image.open(image_path) + + # import ipdb; ipdb.set_trace() + product = ProductFactory(name=name, description=description) + product.image.save( + image_path, + # image, + File(open(image_path, 'rb')), + save=True) # image=Image.open(image_path)) + product.save() logging.debug(f"New Product {product.name} created") print("*", end = '.') print('') From e2b6f02b37ea0155038627486e5944b3ea0dc302 Mon Sep 17 00:00:00 2001 From: Sam Date: Mon, 15 Feb 2021 13:59:15 +0000 Subject: [PATCH 11/14] addtestdata adding images to correct folder, but in the wrong format --- back_latienda/settings/development.py | 4 +- core/management/commands/addtestdata.py | 57 +++++++++++-------------- products/tests.py | 1 - products/utils.py | 1 - 4 files changed, 27 insertions(+), 36 deletions(-) diff --git a/back_latienda/settings/development.py b/back_latienda/settings/development.py index fda1182..57d96b8 100644 --- a/back_latienda/settings/development.py +++ b/back_latienda/settings/development.py @@ -21,9 +21,11 @@ DATABASES = { }, } -MEDIA_ROOT = BASE_DIR + '/../media/' MEDIA_URL = '/media/' +MEDIA_ROOT = BASE_DIR + '/../media/' GEOIP_PATH = BASE_DIR + '/../datasets/' +# MEDIA_ROOT = os.path.join(BASE_DIR, '/../media/') +# GEOIP_PATH = os.path.join(BASE_DIR, '/../datasets/') # JWT SETTINGS SIMPLE_JWT = { diff --git a/core/management/commands/addtestdata.py b/core/management/commands/addtestdata.py index fbccecc..ca88287 100644 --- 
a/core/management/commands/addtestdata.py +++ b/core/management/commands/addtestdata.py @@ -1,6 +1,7 @@ import logging import json import shutil +from io import BytesIO import requests @@ -8,6 +9,7 @@ from django.core.files import File from django.core.management.base import BaseCommand from django.contrib.gis.geos import GEOSGeometry, MultiPolygon from django.conf import settings +from django.core.files.uploadedfile import InMemoryUploadedFile from faker import Faker from PIL import Image @@ -25,6 +27,7 @@ logging.basicConfig( level=logging.INFO, ) + class Command(BaseCommand): logo_url = "https://picsum.photos/300/200" @@ -61,44 +64,32 @@ class Command(BaseCommand): logging.info(f"Creating Products for {company.company_name}") # for i in range(100): for i in range(10): + # make up data name = fake.last_name_nonbinary() description = fake.paragraph(nb_sentences=5) - # TODO: apply tags from tag list - image_path = settings.MEDIA_ROOT + company.company_name + '.jpg' - + # TODO: apply automatic tags from tag list # TODO: write image to S3 storage - response = requests.get(self.logo_url, stream=True) - # import ipdb; ipdb.set_trace() - # write image to disk - ''' - if response.status_code == 200: - with open(image_path, 'wb') as f: - for chunk in response: - import ipdb; ipdb.set_trace() - # f.write(chunk)) - else: - logging.warning(f"Got {response.status_code} querying {self.logo_url}") - ''' - - if response.status_code == 200: - with open(image_path, 'wb') as f: - response.raw.decode_content = True - shutil.copyfileobj(response.raw, f) - image = response.raw.read() - else: - logging.warning(f"Got {response.status_code} querying {self.logo_url}") - continue - - image = Image.open(image_path) - - # import ipdb; ipdb.set_trace() + # create instance product = ProductFactory(name=name, description=description) - product.image.save( - image_path, - # image, - File(open(image_path, 'rb')), - save=True) # image=Image.open(image_path)) + + # get image + response = 
requests.get(self.logo_url, stream=True) + response.raw.decode_content = True + image = Image.open(response.raw) + + # read image from memory + img_io = BytesIO() + image.save(img_io, format='JPEG') + product.image = InMemoryUploadedFile( + BytesIO(), + field_name=None, + name=f"{company.company_name}-{name}.jpg", + content_type='image/jpeg', + size=img_io.tell, + charset=None + ) product.save() + logging.debug(f"New Product {product.name} created") print("*", end = '.') print('') diff --git a/products/tests.py b/products/tests.py index 0c79054..a71db66 100644 --- a/products/tests.py +++ b/products/tests.py @@ -485,7 +485,6 @@ class ProductSearchTest(TestCase): # check for filters self.assertNotEquals([], payload['filters']['singles']) self.assertTrue(len(payload['filters']) >= 2 ) - import ipdb; ipdb.set_trace() class MyProductsViewTest(APITestCase): diff --git a/products/utils.py b/products/utils.py index 13b04aa..7a08a70 100644 --- a/products/utils.py +++ b/products/utils.py @@ -15,7 +15,6 @@ def extract_search_filters(result_set): 'singles': set(), } for item in result_set: - # import ipdb; ipdb.set_trace() try: # extract tags tags = item.tags.all() From f71f822dabe255fd0c3cbb26aedda462bf283399 Mon Sep 17 00:00:00 2001 From: Sam Date: Tue, 16 Feb 2021 10:26:26 +0000 Subject: [PATCH 12/14] addtestdata correctly saving images for product --- core/management/commands/addtestdata.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/core/management/commands/addtestdata.py b/core/management/commands/addtestdata.py index ca88287..2d98449 100644 --- a/core/management/commands/addtestdata.py +++ b/core/management/commands/addtestdata.py @@ -60,8 +60,8 @@ class Command(BaseCommand): # create and assign products to companies for company in new_companies: - print("Creating fake products for {company.company_name}") - logging.info(f"Creating Products for {company.company_name}") + print(f"Creating fake products for {company.company_name}") + 
logging.info(f"Creating fake Products for {company.company_name}") # for i in range(100): for i in range(10): # make up data @@ -77,9 +77,11 @@ class Command(BaseCommand): response.raw.decode_content = True image = Image.open(response.raw) - # read image from memory + img_io = BytesIO() image.save(img_io, format='JPEG') + # option 1: read image from memory + ''' product.image = InMemoryUploadedFile( BytesIO(), field_name=None, @@ -88,10 +90,15 @@ class Command(BaseCommand): size=img_io.tell, charset=None ) + ''' + # option 2: File object + + product.image.save(f"{company.company_name}-{name}.jpg", File(img_io), save=False) product.save() logging.debug(f"New Product {product.name} created") print("*", end = '.') + # import ipdb; ipdb.set_trace() print('') print("Dataset creation finished") \ No newline at end of file From e471b57ec3327835088a14532e917b7664da7c25 Mon Sep 17 00:00:00 2001 From: Sam Date: Tue, 16 Feb 2021 10:29:04 +0000 Subject: [PATCH 13/14] cleanup --- core/management/commands/addtestdata.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/core/management/commands/addtestdata.py b/core/management/commands/addtestdata.py index 2d98449..7d98462 100644 --- a/core/management/commands/addtestdata.py +++ b/core/management/commands/addtestdata.py @@ -77,28 +77,13 @@ class Command(BaseCommand): response.raw.decode_content = True image = Image.open(response.raw) - + # save using File object img_io = BytesIO() image.save(img_io, format='JPEG') - # option 1: read image from memory - ''' - product.image = InMemoryUploadedFile( - BytesIO(), - field_name=None, - name=f"{company.company_name}-{name}.jpg", - content_type='image/jpeg', - size=img_io.tell, - charset=None - ) - ''' - # option 2: File object - product.image.save(f"{company.company_name}-{name}.jpg", File(img_io), save=False) product.save() logging.debug(f"New Product {product.name} created") - print("*", end = '.') - # import ipdb; ipdb.set_trace() print('') 
print("Dataset creation finished") \ No newline at end of file From 0a6762defd9e7932f7397c9f2395334448388757 Mon Sep 17 00:00:00 2001 From: Sam Date: Tue, 16 Feb 2021 10:36:58 +0000 Subject: [PATCH 14/14] readme update --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 17f9de5..c7155f1 100644 --- a/README.md +++ b/README.md @@ -184,14 +184,17 @@ Response format: { "filters": { "singles": ["tag1", "tag2"], // for tags that aren't nested - "entry_1": ["subtag 1", "subtag 2"], // for tree tags - "entry_2": ["subtag 1", "subtag 2"] // one per penultimate tag in tree + "entry_1": ["subtag_1", "subtag_2"], // for tree tags like entry_1/subtag_1 + "entry_2": ["subtag_1", "subtag_2"] // one per penultimate tag in tree }, "products" : [], // list of serialized instances, in order of relevancy } ``` Check out `products.tests.ProductSearchTest` for a practical case. ## Massive Data Load Endpoints ### COOP and Managing User Data Load