From 5829b107f9ac128f1e402d91679551642c0d56d8 Mon Sep 17 00:00:00 2001
From: Sam
Date: Thu, 25 Feb 2021 13:39:13 +0000
Subject: [PATCH 1/8] improvements to WC migration script

---
 companies/models.py  |  1 +
 utils/woocommerce.py | 37 ++++++++++++++++++++++---------------
 2 files changed, 23 insertions(+), 15 deletions(-)

diff --git a/companies/models.py b/companies/models.py
index 3d0efdd..35a27ca 100644
--- a/companies/models.py
+++ b/companies/models.py
@@ -46,6 +46,7 @@ class Company(models.Model):
     is_validated = models.BooleanField('Validado', default=False, null=True, blank=True)
     is_active = models.BooleanField('Activado', default=False, null=True, blank=True)
     credentials = JSONField(null=True)
+    shipping_terms = models.TextField('Condiciones de envío', null=True, blank=True)

     # internal
     created = models.DateTimeField('date of creation', auto_now_add=True)
diff --git a/utils/woocommerce.py b/utils/woocommerce.py
index 1683651..be2a34c 100644
--- a/utils/woocommerce.py
+++ b/utils/woocommerce.py
@@ -59,7 +59,7 @@ def create_imported_product(info, company, history, user):
         'name': info.get('name'),
         'description': BeautifulSoup(info.get('description', ''), "lxml").text,
         'sku': info.get('sku'),
-        'price': info.get('price'),
+        'price': None if info.get('price') == '' else info.get('price'),
     }

     # alternative method
@@ -73,20 +73,27 @@
         except Exception as e:
             logging.error(f"Could not create product instance: {str(e)}")
             return None
-        try:
-            # get image
-            headers={"User-Agent" : "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"}
-            image_url = info['images'][0]['src']
-            response = requests.get(image_url, stream=True, headers=headers)
-            response.raw.decode_content = True
-            image = Image.open(response.raw)
-            # save using File object
-            img_io = BytesIO()
-            image.save(img_io, format='JPEG')
-            new.image.save(f"{new.name}-{new.sku}.jpg", File(img_io), save=False)
-            new.save()
-        except Exception as e:
-            logging.error(f"Could not add image to product: {str(e)}")
+
+        if len(info['images']) > 0:
+            try:
+                # get image
+                headers={"User-Agent" : "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"}
+                image_url = info['images'][0]['src']
+                response = requests.get(image_url, stream=True, headers=headers)
+                assert(response.status_code==200)
+                response.raw.decode_content = True
+                image = Image.open(response.raw)
+                # save using File object
+                img_io = BytesIO()
+                image.save(img_io, format=image.format)
+                new.image.save(f"{new.name}-{new.sku}.jpg", File(img_io), save=False)
+                new.save()
+            except AssertionError as e:
+                logging.error(f"Source image [{info['images'][0]['src']}] not reachable: {response.status_code}")
+            except Exception as e:
+                logging.error(f"Could not add image to product {new.sku} from [{info['images'][0]['src']}]: {str(e)}")
+        else:
+            logging.info(f"No image for Product {new.name}")
         return new
     else:
         logging.error(f"{serializer.errors}")

From 874afc81ac67d476311659449e298206c3bbbf76 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 10:17:27 +0000
Subject: [PATCH 2/8] added search result serializer to empty string search query

---
 products/tests.py | 2 +-
 products/views.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/products/tests.py b/products/tests.py
index 7cb3b2e..3881cd7 100644
--- a/products/tests.py
+++ b/products/tests.py
@@ -541,7 +541,7 @@ class ProductSearchTest(TestCase):
         q = quote("zapatos rojos")
         limit = 2
-        url = f"{self.endpoint}?q={q}&limit=2"
+        url = f"{self.endpoint}?q={q}&limit={limit}"

         # send in request
         response = self.client.get(url)
diff --git a/products/views.py b/products/views.py
index 88b1ed9..8206444 100644
--- a/products/views.py
+++ b/products/views.py
@@ -188,7 +188,7 @@ def product_search(request):
         return Response({"errors": {"details": "No query string to parse"}})
     elif q is '':
         # return everything
-        serializer = ProductSerializer(Product.objects.all(), many=True)
+        serializer = SearchResultSerializer(Product.objects.all(), many=True)
         products = serializer.data
         # filters = extract_search_filters(products)
         return Response(data={"filters": [], "count": len(products), "products": products})

From 9b02c05f4e3b7f3800b9344443b96f8706e10289 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 10:24:02 +0000
Subject: [PATCH 3/8] disabled DRF web interface

---
 back_latienda/settings/development.py | 4 ++++
 back_latienda/settings/production.py  | 5 +++++
 2 files changed, 9 insertions(+)

diff --git a/back_latienda/settings/development.py b/back_latienda/settings/development.py
index 56c8279..af1f405 100644
--- a/back_latienda/settings/development.py
+++ b/back_latienda/settings/development.py
@@ -51,3 +51,7 @@ SIMPLE_JWT = {
 }

 EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
+
+# disable web interface for REST backend
+
+# REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = ('rest_framework.renderers.JSONRenderer',)
diff --git a/back_latienda/settings/production.py b/back_latienda/settings/production.py
index 5dbf4c2..46fb796 100644
--- a/back_latienda/settings/production.py
+++ b/back_latienda/settings/production.py
@@ -80,3 +80,8 @@ SIMPLE_JWT = {
     'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
 }

+# disable web interface for REST backend
+
+REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = ('rest_framework.renderers.JSONRenderer',)
+
+

From 86020afd27087acd4aed094fb2f892af02b15da4 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 10:58:36 +0000
Subject: [PATCH 4/8] search response includes min and max prices

---
 products/tests.py |  9 +++++++++
 products/utils.py |  8 +++++++-
 products/views.py | 30 ++++++++++++++++++++++------
 3 files changed, 40 insertions(+), 7 deletions(-)

diff --git a/products/tests.py b/products/tests.py
index 3881cd7..745875b 100644
--- a/products/tests.py
+++ b/products/tests.py
@@ -514,6 +514,12 @@ class ProductSearchTest(TestCase):
         self.assertEqual(response.status_code, 200)
         # load response data
         payload = response.json()
+        # check for expected fields in payload
+        self.assertIsNotNone(payload.get('filters'))
+        self.assertIsNotNone(payload.get('count'))
+        self.assertIsNotNone(payload.get('products'))
+        self.assertIsNotNone(payload.get('prices'))
+
         # check for object creation
         self.assertEquals(len(payload['products']), len(expected_instances))
         # check results ordered by rank
@@ -524,6 +530,9 @@ class ProductSearchTest(TestCase):
         # check for filters
         self.assertNotEquals([], payload['filters']['tags']['singles'])
         self.assertTrue(len(payload['filters']['tags']) >= 2 )
+        # check prices
+        self.assertTrue(payload['prices']['min'] <= payload['prices']['max'])
+

     def test_anon_user_can_paginate_search(self):
         expected_instances = [
diff --git a/products/utils.py b/products/utils.py
index 8cfe030..3d51ce6 100644
--- a/products/utils.py
+++ b/products/utils.py
@@ -2,6 +2,7 @@ import logging

 from django.db.models import Q
 from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
+from django.db.models import Max, Min

 from products.models import Product
@@ -189,7 +190,12 @@ def find_related_products_v6(keyword, shipping_cost=None, discount=None, categor
     if price_max is not None:
         products_qs = products_qs.filter(price__lt=price_max)

-    return set(products_qs)
+    # get min_price and max_price
+    min_price = products_qs.aggregate(Min('price'))
+    max_price = products_qs.aggregate(Max('price'))
+
+
+    return set(products_qs), min_price, max_price


 def find_related_products_v4(keyword):
diff --git a/products/views.py b/products/views.py
index 8206444..8e45772 100644
--- a/products/views.py
+++ b/products/views.py
@@ -1,11 +1,7 @@
 import logging
 import csv
 import datetime
-import operator
-from functools import reduce

-from django.shortcuts import render
-from django.conf import settings
 from django.db.models import Q
 from django.core import serializers
@@ -157,6 +153,16 @@ def product_search(request):
     - category: string
     - tags: string
     - order: string (newest/oldest)
+    - price_min: int
+    - price_max: int
+
+    In the response:
+    - filters
+    - count
+    - products
+    - price_min
+    - price_max
+
     """
     # capture query params
     q = request.GET.get('q', None)
@@ -195,11 +201,22 @@ def product_search(request):
     try:
         # we collect our results here
         result_set = set()
+        # values for response
+        prices = {
+            'min': None,
+            'max': None,
+        }
         # split query string into single words
         chunks = q.split(' ')
         for chunk in chunks:
-            product_set = find_related_products_v6(chunk, shipping_cost, discount, category, tags, price_min, price_max)
+            product_set, min_price, max_price = find_related_products_v6(chunk, shipping_cost, discount, category, tags, price_min, price_max)
+            # update price values
+            # import ipdb; ipdb.set_trace()
+            if prices['min'] is None or min_price['price__min'] < prices['min']:
+                prices['min'] = min_price['price__min']
+            if prices['max'] is None or max_price['price__max'] > prices['max']:
+                prices['max'] = max_price['price__max']
             # add to result set
             result_set.update(product_set)
         # TODO: add search for entire phrase ???
@@ -218,6 +235,7 @@
         # order results by RANK
         ordered_products = sorted(result_list, key= lambda rank:rank.rank, reverse=True)

+        # extract max and min price values
         serializer = SearchResultSerializer(ordered_products, many=True)
         product_results = [dict(i) for i in serializer.data]
         total_results = len(product_results)
@@ -231,6 +249,6 @@
             limit = int(limit)
             product_results = product_results[:limit]

-        return Response(data={"filters": filters, "count": total_results, "products": product_results})
+        return Response(data={"filters": filters, "count": total_results, "products": product_results, 'prices': prices})
     except Exception as e:
         return Response({"errors": {"details": str(e)}}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

From 0f7af9c0fb3eb175ef8d6149da0fbef2efaf3117 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 11:46:40 +0000
Subject: [PATCH 5/8] isolated coop loading functionality

---
 core/utils.py     | 39 +++++++++++++++++++++++++++++++++++++++
 core/views.py     | 30 +++---------------------------
 products/tests.py |  1 -
 products/views.py |  1 -
 4 files changed, 42 insertions(+), 29 deletions(-)

diff --git a/core/utils.py b/core/utils.py
index 732b99b..b94035d 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -13,6 +13,8 @@
 from rest_framework_simplejwt.tokens import RefreshToken

 from tagulous.models import TagModel

+from companies.models import Company
+

 User = get_user_model()
@@ -98,3 +100,40 @@ def reformat_google_taxonomy(file_name):
             line = line.replace(' > ', '/')
             destination_file.write(line)

+
+def coop_loader(csv_reader, request):
+    """
+    Parse csv data and extract:
+
+    - coop data
+    - manager user data
+
+    Return counts
+    """
+    coop_counter = 0
+    user_counter = 0
+    for row in csv_reader:
+        if '' in (row['cif'], row['nombre-coop'], row['email']):
+            logging.error(f"Required data missing: {row}")
+            continue
+        try:
+            coop_data = {
+                'cif': row['cif'].strip(),
+                'company_name': row['nombre-coop'].strip(),
+                'short_name': row['nombre-corto'].strip(),
+                'shop': bool(row['es-tienda'].strip()),
+                'shop_link': row['url'].strip(),
+            }
+            coop = Company.objects.create(**coop_data)
+            logging.info(f"Created Coop: {coop_data}")
+            coop_counter += 1
+
+            coop_user = User.objects.create_user(email=row['email'], company=coop, role='COOP_MANAGER', is_active=False)
+            # send confirmation email
+            send_verification_email(request, coop_user)
+            logging.info(f"Created User: {coop_user}")
+            user_counter += 1
+        except Exception as e:
+            logging.error(f"Could not parse {row}")
+    return coop_counter, user_counter
+
diff --git a/core/views.py b/core/views.py
index b3a551a..2eec7cb 100644
--- a/core/views.py
+++ b/core/views.py
@@ -183,35 +183,11 @@ def load_coop_managers(request):
         logging.info(f"Reading contents of {csv_file.name}")
         decoded_file = csv_file.read().decode('utf-8').splitlines()
         csv_reader = csv.DictReader(decoded_file, delimiter=',')
-        coop_counter = 0
-        user_counter = 0
-        for row in csv_reader:
-            if '' in (row['cif'], row['nombre-coop'], row['email']):
-                logging.error(f"Required data missing: {row}")
-                continue
-            try:
-                coop_data = {
-                    'cif': row['cif'].strip(),
-                    'company_name': row['nombre-coop'].strip(),
-                    'short_name': row['nombre-corto'].strip(),
-                    'shop': bool(row['es-tienda'].strip()),
-                    'shop_link': row['url'].strip(),
-                }
-                coop = Company.objects.create(**coop_data)
-                logging.info(f"Created Coop: {coop_data}")
-                coop_counter += 1
-
-                coop_user = User.objects.create_user(email=row['email'], company=coop, role='COOP_MANAGER', is_active=False)
-                # send confirmation email
-                utils.send_verification_email(request, coop_user)
-                logging.info(f"Created User: {coop_user}")
-                user_counter += 1
-            except Exception as e:
-                logging.error(f"Could not parse {row}")
-
-        return Response()
+        coop_count, user_count = utils.coop_loader(csv_reader, request)

+        return Response({'details': f"Created {coop_count} Companies, {user_count} Managing Users"})
     except Exception as e:
-        return Response({"errors": {"details": str(type(e))}})
+        return Response({"errors": {"details": f'{type(e)}: {e}'}})


 @api_view(['GET',])
diff --git a/products/tests.py b/products/tests.py
index 745875b..720ea1b 100644
--- a/products/tests.py
+++ b/products/tests.py
@@ -533,7 +533,6 @@ class ProductSearchTest(TestCase):
         # check prices
         self.assertTrue(payload['prices']['min'] <= payload['prices']['max'])
-

     def test_anon_user_can_paginate_search(self):
         expected_instances = [
             self.factory(tags="lunares/rojos", category='zapatos', description="zapatos verdes"),
diff --git a/products/views.py b/products/views.py
index 8e45772..efc40c6 100644
--- a/products/views.py
+++ b/products/views.py
@@ -212,7 +212,6 @@ def product_search(request):
         for chunk in chunks:
             product_set, min_price, max_price = find_related_products_v6(chunk, shipping_cost, discount, category, tags, price_min, price_max)
             # update price values
-            # import ipdb; ipdb.set_trace()
             if prices['min'] is None or min_price['price__min'] < prices['min']:
                 prices['min'] = min_price['price__min']
             if prices['max'] is None or max_price['price__max'] > prices['max']:

From fb2da7d2cfaf4c2cc38fcaa6774c9afe2231d405 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 11:52:17 +0000
Subject: [PATCH 6/8] fixed error in serializer change for product search

---
 products/serializers.py | 2 +-
 products/tests.py       | 2 ++
 products/views.py       | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/products/serializers.py b/products/serializers.py
index 68de558..7195e8f 100644
--- a/products/serializers.py
+++ b/products/serializers.py
@@ -11,7 +11,7 @@ class ProductSerializer(TaggitSerializer, serializers.ModelSerializer):
     tags = TagListSerializerField(required=False)
     category = SingleTagSerializerField(required=False) # main tag category
     attributes = TagListSerializerField(required=False)
-
+    company = CompanySerializer(read_only=True)
     class Meta:
         model = Product
         fields = '__all__'
diff --git a/products/tests.py b/products/tests.py
index 720ea1b..81f7916 100644
--- a/products/tests.py
+++ b/products/tests.py
@@ -311,6 +311,8 @@ class ProductViewSetTest(APITestCase):

         # Assert instance has been modified
         for key in data:
+            if key == 'company':
+                continue
             self.assertEqual(data[key], response.data[key])

     def test_auth_user_cannot_modify_other_users_instance(self):
diff --git a/products/views.py b/products/views.py
index efc40c6..697203a 100644
--- a/products/views.py
+++ b/products/views.py
@@ -194,7 +194,7 @@ def product_search(request):
         return Response({"errors": {"details": "No query string to parse"}})
     elif q is '':
         # return everything
-        serializer = SearchResultSerializer(Product.objects.all(), many=True)
+        serializer = ProductSerializer(Product.objects.all(), many=True)
         products = serializer.data
         # filters = extract_search_filters(products)
         return Response(data={"filters": [], "count": len(products), "products": products})

From 874068881de82edeed1fcb6869cd23ac69c8ccb4 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 12:47:23 +0000
Subject: [PATCH 7/8] improvements to csv loading code

---
 core/tests.py          |  4 ++--
 core/utils.py          | 46 +++++++++++++++++++++++++++++++++++-------
 datasets/test_coop.csv | 18 ++++++++---------
 3 files changed, 50 insertions(+), 18 deletions(-)

diff --git a/core/tests.py b/core/tests.py
index 10312d3..6a249ae 100644
--- a/core/tests.py
+++ b/core/tests.py
@@ -452,8 +452,8 @@ class LoadCoopManagerTestCase(APITestCase):

         # send in request
         response = self.client.post(self.endpoint, files)
-
-        # check re sponse
+        import ipdb; ipdb.set_trace()
+        # check response
         self.assertEqual(response.status_code, 200)
         # check for object creation
         self.assertEquals(company_count + 5, self.company_model.objects.count())
diff --git a/core/utils.py b/core/utils.py
index b94035d..14a363f 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -8,6 +8,7 @@
 from django.template.loader import render_to_string
 from django.core.mail import EmailMessage
 from django.contrib.auth.tokens import PasswordResetTokenGenerator
 from django.conf import settings
+from django.core.validators import validate_email, EmailValidator, URLValidator, ValidationError

 from rest_framework_simplejwt.tokens import RefreshToken
@@ -101,7 +102,7 @@ def reformat_google_taxonomy(file_name):
             destination_file.write(line)


-def coop_loader(csv_reader, request):
+def coop_loader(csv_reader, request=None):
     """
     Parse csv data and extract:

@@ -113,16 +114,45 @@ def coop_loader(csv_reader, request=None):
     coop_counter = 0
     user_counter = 0
     for row in csv_reader:
+        # trim strings
+        for key in row:
+            if row[key]: row[key] = row[key].strip()
+        # import ipdb; ipdb.set_trace()
         if '' in (row['cif'], row['nombre-coop'], row['email']):
             logging.error(f"Required data missing: {row}")
             continue
+        # validate email
+        try:
+            validate_email(row['email'])
+        except ValidationError:
+            logging.warning(f"Invalid email value '{row['email']}', skipped")
+            continue
+        # validate URLs
+        if row['url'].startswith('http') is not True:
+            row['url'] = 'http://' + row['url']
+        if row['logo-url'].startswith('http') is not True:
+            row['logo-url'] = 'http://' + row['logo-url']
+        validator = URLValidator()
+        try:
+            validator(row['url'])
+        except ValidationError:
+            logging.warning(f"Invalid url value '{row['url']}', skipped")
+            row['url'] = None
+        try:
+            validator(row['logo-url'])
+        except ValidationError:
+            logging.warning(f"Invalid url value '{row['logo-url']}', skipped")
+            row['logo-url'] = None
+        # validate boolean
+
+        # create instances
         try:
             coop_data = {
-                'cif': row['cif'].strip(),
-                'company_name': row['nombre-coop'].strip(),
-                'short_name': row['nombre-corto'].strip(),
-                'shop': bool(row['es-tienda'].strip()),
-                'shop_link': row['url'].strip(),
+                'cif': row['cif'],
+                'company_name': row['nombre-coop'],
+                'short_name': row['nombre-corto'],
+                'shop': bool(row['es-tienda']),
+                'shop_link': row['url'],
             }
             coop = Company.objects.create(**coop_data)
             logging.info(f"Created Coop: {coop_data}")
@@ -130,10 +160,12 @@ def coop_loader(csv_reader, request=None):

             coop_user = User.objects.create_user(email=row['email'], company=coop, role='COOP_MANAGER', is_active=False)
             # send confirmation email
-            send_verification_email(request, coop_user)
+            if request is not None:
+                send_verification_email(request, coop_user)
             logging.info(f"Created User: {coop_user}")
             user_counter += 1
         except Exception as e:
+            import ipdb; ipdb.set_trace()
             logging.error(f"Could not parse {row}")
     return coop_counter, user_counter

diff --git a/datasets/test_coop.csv b/datasets/test_coop.csv
index 6080b1e..87cacbd 100644
--- a/datasets/test_coop.csv
+++ b/datasets/test_coop.csv
@@ -1,9 +1,9 @@
-email,cif,nombre-coop,nombre-corto,url,es-tienda
-, 1223432214L, FEWQ4FEWQ COOP, fc, tienda1.com, True
-dsfds@mail.com,, FEW2QFEWQ COOP, fc, tienda2.com, True
-ghjhg@mail.com, 122343214L,, fc, tienda3.com, True
-xcv@mail.com, 12343214L, FEWQ2FEWQ COOP,, tienda4.com, True
-cvc@mail.com, 1879783214L, 2FEWQFEWQ COOP, fc,, True
-bvbc@mail.com, 5653214L, FEW2QFEWQ COOP, fc, tienda6.com,
-kjk@mail.com, 54326543H, FE2WQF2EWQ COOP, fc, tienda7.com, True
-yuyu@mail.com, 12343214L, F2EWQFEWQ COOP, fc, tienda8.com, True
\ No newline at end of file
+email,cif,nombre-coop,nombre-corto,url,es-tienda,logo-url,telefono,provincia,localidad,cp,direccion
+, 1223432214L, FEWQ4FEWQ COOP, fc, tienda1.com, True,,,,,,
+dsfds@mail.com,, FEW2QFEWQ COOP, fc, tienda2.com, True,,,,,,
+ghjhg@mail.com, 122343214L,, fc, tienda3.com, True,,,,,,
+xcv@mail.com, 12343214L, FEWQ2FEWQ COOP,, tienda4.com, True,,,,,,
+cvc@mail.com, 1879783214L, 2FEWQFEWQ COOP, fc,, True,,,,,,
+bvbc@mail.com, 5653214L, FEW2QFEWQ COOP, fc, tienda6.com,,,,,,
+kjk@mail.com, 54326543H, FE2WQF2EWQ COOP, fc, tienda7.com, True,,,,,,
+yuyu@mail.com, 12343214L, F2EWQFEWQ COOP, fc, tienda8.com, True,,,,,,
\ No newline at end of file

From 25b8e561d33e3ba73161f0855aed8afc553cafd5 Mon Sep 17 00:00:00 2001
From: Sam
Date: Fri, 26 Feb 2021 13:16:37 +0000
Subject: [PATCH 8/8] improvements to coop loader

---
 core/tests.py        |  1 -
 core/utils.py        | 38 ++++++++++++++++++++++++++++++++++----
 utils/woocommerce.py |  2 +-
 3 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/core/tests.py b/core/tests.py
index 6a249ae..bc48c2c 100644
--- a/core/tests.py
+++ b/core/tests.py
@@ -452,7 +452,6 @@ class LoadCoopManagerTestCase(APITestCase):

         # send in request
         response = self.client.post(self.endpoint, files)
-        import ipdb; ipdb.set_trace()
         # check response
         self.assertEqual(response.status_code, 200)
         # check for object creation
diff --git a/core/utils.py b/core/utils.py
index 14a363f..b4c1c8e 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -1,4 +1,5 @@
 import logging
+from io import BytesIO

 from django.contrib.auth import get_user_model
 from django.contrib.sites.shortcuts import get_current_site
@@ -12,6 +13,9 @@
 from django.core.validators import validate_email, EmailValidator, URLValidator, ValidationError

 from rest_framework_simplejwt.tokens import RefreshToken

+import requests
+from PIL import Image
+from django.core.files import File
 from tagulous.models import TagModel

 from companies.models import Company
@@ -116,7 +120,7 @@ def coop_loader(csv_reader, request=None):
     for row in csv_reader:
         # trim strings
         for key in row:
-            if row[key]: row[key] = row[key].strip()
+            if row[key]: row[key] = row[key].strip().lower()
         # import ipdb; ipdb.set_trace()
         if '' in (row['cif'], row['nombre-coop'], row['email']):
             logging.error(f"Required data missing: {row}")
@@ -136,14 +140,19 @@ def coop_loader(csv_reader, request=None):
         try:
             validator(row['url'])
         except ValidationError:
-            logging.warning(f"Invalid url value '{row['url']}', skipped")
+            logging.warning(f"Invalid url value '{row['url']}'")
             row['url'] = None
         try:
             validator(row['logo-url'])
         except ValidationError:
-            logging.warning(f"Invalid url value '{row['logo-url']}', skipped")
+            logging.warning(f"Invalid logo URL value '{row['logo-url']}'")
            row['logo-url'] = None
         # validate boolean
+        try:
+            shop = bool(row['es-tienda'])
+        except:
+            logging.warning(f"Invalid value for es-tienda: {row['es-tienda']}")
+            shop = None

         # create instances
         try:
             coop_data = {
                 'cif': row['cif'],
-                'company_name': row['nombre-coop'],
+                'company_name': row['nombre-coop'],
                 'short_name': row['nombre-corto'],
-                'shop': bool(row['es-tienda']),
+                'shop': shop,
                 'shop_link': row['url'],
+                'phone': row['telefono'],
+                'address': row['direccion'],
             }
             coop = Company.objects.create(**coop_data)
+            # image logo data
+            if row['logo-url'] is not None:
+                try:
+                    # get image
+                    headers={"User-Agent" : "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"}
+                    response = requests.get(row['logo-url'], stream=True, headers=headers)
+                    assert(response.status_code==200)
+                    response.raw.decode_content = True
+                    image = Image.open(response.raw)
+                    # save using File object
+                    img_io = BytesIO()
+                    image.save(img_io, format=image.format)
+                    coop.logo.save(f"{coop.company_name}.{image.format.lower()}", File(img_io), save=False)
+                    coop.save()
+                except AssertionError as e:
+                    logging.error(f"Source image [{row['logo-url']}] not reachable: {response.status_code}")
+                except Exception as e:
+                    logging.error(f"Could not add image to COOP {coop.company_name} from [{row['logo-url']}]: {str(e)}")
+
             # logging.info(f"Created Coop: {coop_data}")
             coop_counter += 1
diff --git a/utils/woocommerce.py b/utils/woocommerce.py
index be2a34c..8cc76f7 100644
--- a/utils/woocommerce.py
+++ b/utils/woocommerce.py
@@ -86,7 +86,7 @@ def create_imported_product(info, company, history, user):
                 # save using File object
                 img_io = BytesIO()
                 image.save(img_io, format=image.format)
-                new.image.save(f"{new.name}-{new.sku}.jpg", File(img_io), save=False)
+                new.image.save(f"{new.name}-{new.sku}.{image.format.lower()}", File(img_io), save=False)
                 new.save()
             except AssertionError as e:
                 logging.error(f"Source image [{info['images'][0]['src']}] not reachable: {response.status_code}")