improvements to coop product loader

Sam
2021-03-02 10:44:23 +00:00
parent 40bc3568c1
commit f71f880a6b
3 changed files with 42 additions and 27 deletions

View File

@@ -41,7 +41,7 @@ class Company(models.Model):
    shop_rss_feed = models.URLField('RSS tienda online', null=True, blank=True)
    sale_terms = models.TextField('Condiciones de venta', null=True, blank=True)
    shipping_cost = models.DecimalField('Gastos de envío', max_digits=10, decimal_places=2, null=True, blank=True)
    tags = TagField(force_lowercase=True,max_count=5, tree=True, null=True, blank=True)
    tags = TagField(force_lowercase=True,max_count=5, tree=True, blank=True)
    sync = models.BooleanField('Sincronizar tienda', default=False, null=True, blank=True)
    is_validated = models.BooleanField('Validado', default=False, null=True, blank=True)
    is_active = models.BooleanField('Activado', default=False, null=True, blank=True)
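Context for the tags change above: if this TagField is django-tagulous's (the one that accepts force_lowercase, max_count and tree), it is a ManyToManyField under the hood, where null=True has no effect and triggers Django's fields.W340 system check, so dropping it removes the warning without changing behaviour. A minimal usage sketch under that assumption (the Company 'name' field is also an assumption, not shown in this diff):

# Illustrative only, not part of this commit.
company = Company.objects.create(name="Coop Demo")
company.tags = "Huerta, ECOLOGICO, Cercanía"   # force_lowercase=True stores them lowercased
company.save()
print([tag.name for tag in company.tags.all()])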

View File

@@ -1,5 +1,6 @@
import logging
import datetime
from decimal import Decimal
from django.db.models import Q
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
@@ -16,6 +17,14 @@ from io import BytesIO
from django.core.files import File
logging.basicConfig(
    filename='logs/csv-load.log',
    filemode='w',
    format='%(levelname)s:%(message)s',
    level=logging.INFO,
)
def extract_search_filters(result_set):
"""
Returned object should look something like:
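The hunk above wires logging straight to a file at import time. For reference, a sketch of an equivalent setup with a dedicated logger, which sidesteps two basicConfig caveats: filemode='w' truncates logs/csv-load.log each time the configuration runs, and basicConfig is a no-op if the root logger was already configured elsewhere. Names here are illustrative, not from this commit:

# Illustrative alternative, not part of this commit.
import logging

csv_logger = logging.getLogger("csv_load")
handler = logging.FileHandler("logs/csv-load.log", mode="a")
handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
csv_logger.addHandler(handler)
csv_logger.setLevel(logging.INFO)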
@@ -220,7 +229,7 @@ def find_related_products_v4(keyword):
    return set(products_qs)
def product_loader(csv_reader, user=None, company=None):
def product_loader(csv_reader, user, company=None):
"""
Parse csv data and extract:
@@ -229,8 +238,16 @@ def product_loader(csv_reader, user=None, company=None):
    Return counts
    """
    counter = 0
    # get company
    if company is None and user is not None:
        if user.company is not None:
            company = user.company
        else:
            # cannot add products without a company
            return None
    # create historysync instance
    history = HistorySync.objects.create(company=user.company, sync_date=datetime.datetime.now())
    history = HistorySync.objects.create(company=company, sync_date=datetime.datetime.now())
    for row in csv_reader:
        # trim strings
@@ -238,37 +255,35 @@ def product_loader(csv_reader, user=None, company=None):
            if row[key]:
                if 'imagen' in key or 'categoria' in key:
                    row[key] = row[key].strip()
                elif key in ['precio', 'gastos-envio']:
                    # dec = row[key][:-1].strip()
                    row[key] = row[key][:-1].strip()
                else:
                    row[key] = row[key].strip().lower()
        # check required data
        if '' in (row['nombre-producto'], row['descripcion'], row['precio'], row['categoria']):
        if '' in (row['nombre-producto'], row['descripcion'], row['precio'],):
            logging.error(f"Required data missing: {row}")
            continue
        #import ipdb; ipdb.set_trace()
        try:
            # TODO: if tags is empty, auto-generate tags
            if not company:
                company = user.company
            # assemble instance data
            product_data = {
                'company': company,
                'name': row['nombre-producto'].strip(),
                'description': row['descripcion'].strip(),
                'url': row['url'].strip(),
                'price': float(row['precio'].strip().replace(',','.')),
                'shipping_cost': float(row['gastos-envio'].strip().replace(',','.')),
                'shipping_terms': row['cond-envio'].strip(),
                'discount': row['descuento'].strip(),
                'stock': row['stock'].strip(),
                'tags': row['tags'].strip(),
                'category': row['categoria'].strip(),
                'identifiers': row['identificadores'].strip(),
                #'history': history,
                'creator': user,
                'company': company,
                'name': row['nombre-producto'].strip(),
                'description': row['descripcion'].strip(),
                'url': row['url'].strip(),
                'price': float(row['precio'].strip().replace(',','.')),
                'shipping_cost': float(row['gastos-envio'].strip().replace(',','.')),
                'shipping_terms': row['cond-envio'].strip(),
                'discount': row['descuento'].strip(),
                'stock': row['stock'].strip(),
                'tags': row['tags'].strip(),
                'category': row['categoria'].strip(),
                'identifiers': row['identificadores'].strip(),
                'history': history,
                'creator': user,
            }
            product = Product.objects.create(**product_data)
            # image logo data
@@ -292,8 +307,7 @@ def product_loader(csv_reader, user=None, company=None):
logging.info(f"Created Product {product.id}")
counter += 1
except Exception as e:
#import ipdb; ipdb.set_trace()
logging.error(f"Could not parse {row}")
logging.error(f"Could not parse {counter}: {str(e)}")
history.quantity = counter
history.save()
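The imports hunk above includes from decimal import Decimal, yet the loader still converts 'precio' and 'gastos-envio' with float(), even though the matching Company field earlier in this commit is a DecimalField. For reference, a minimal sketch of the same comma-decimal parsing done with Decimal; the helper name is illustrative and the thousands-separator handling is an assumption, not something this commit does:

# Illustrative helper, not part of this commit: turn "12,50 €"-style CSV values
# into Decimal, loosely following the strip()/[:-1]/replace() steps above.
from decimal import Decimal, InvalidOperation

def parse_price(raw):
    cleaned = raw.strip().rstrip('€').strip().replace('.', '').replace(',', '.')
    try:
        return Decimal(cleaned)
    except InvalidOperation:
        return None

# parse_price("1.234,50 €") -> Decimal('1234.50'); parse_price("abc") -> None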

View File

@@ -81,8 +81,9 @@ def load_coop_products(request):
        decoded_file = csv_file.read().decode('utf-8').splitlines()
        csv_reader = csv.DictReader(decoded_file, delimiter=',')
        count = product_loader(csv_reader, request.user)
        return Response(f"{count} products registered for {request.user.company_name}")
        if count is None:
            return Response({"errors": {"details": "Authenticated user is not related to any company"}}, status=status.HTTP_406_NOT_ACCEPTABLE)
        return Response(f"{count} products registered for {request.user.company.company_name}")
    except Exception as e:
        return Response({"errors": {"details": str(type(e))}}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
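For completeness, a hedged sketch of exercising load_coop_products from a test shell. The endpoint URL and the upload field name ("csv") are assumptions, not taken from this diff; the CSV headers match the keys product_loader reads, and the quoted fields keep the comma-decimal prices intact for csv.DictReader:

# Illustrative request sketch, not part of this commit.
from django.contrib.auth import get_user_model
from django.core.files.uploadedfile import SimpleUploadedFile
from rest_framework.test import APIClient

csv_body = (
    "nombre-producto,descripcion,url,precio,gastos-envio,cond-envio,"
    "descuento,stock,tags,categoria,identificadores\n"
    '"Miel de romero","Tarro de 500 g","https://example.coop/miel",'
    '"6,50 €","2,00 €","48h","0","10","miel","alimentacion","SKU-001"\n'
)

client = APIClient()
client.force_authenticate(user=get_user_model().objects.first())  # a user linked to a Company
upload = SimpleUploadedFile("products.csv", csv_body.encode("utf-8"), content_type="text/csv")
response = client.post("/api/products/load-coop/", {"csv": upload}, format="multipart")  # URL assumed
print(response.status_code, response.data)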