Merge branch 'development' of https://bitbucket.org/enreda/back-latienda into diego

This commit is contained in:
Diego Calvo
2021-02-16 12:08:59 +01:00
12 changed files with 161 additions and 62 deletions

View File

@@ -5,11 +5,17 @@ This README aims to document functionality of backend as well as required steps
## Table of Contents
- [First Steps](#first-steps)
- [Location Data](#location-data)
- [Load location data](#load-location-data)
- [Load taxonomy data](#load-taxonomy-data)
- [Endpoints](#endpoints)
- [Data Load](#data-load)
- [Product Search](#product-search)
- [Massive Data Load Endpoints](#massive-data-load-endpoints)
- [COOP and Managing User Data Load](#coop-and-managing-user-data-load)
- [Product Data Load](#product-data-load)
- [GeoIP Setup](#geoip-setup)
- [Development Utils](#development-utils)
- [Fake product data generation](#fake-product-data-generation)
## First Steps
@@ -35,13 +41,20 @@ python manage.py migrate
- Start server in development mode: `python manage.py runserver`
## Location data
### Load Location Data
To load the initial location data, run: `python manage.py loadgisdata`
## Endpoints
### Load Taxonomy Data
This data serves as the initial set of Tags
To load the initial set of tags, run: `python manage.py addtaxonomy`
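The `addtaxonomy` command reads `settings.TAXONOMY_FILE` and creates one tag per line, with tree levels separated by `/`. For illustration only (these entries are not the real taxonomy), such a file could look like:
```text
colores/rojos
colores/azules
lunares/blancos
tono/brillante
```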
## Endpoints
### User Management
@@ -146,7 +159,7 @@ Endpoint url: `/api/v1/stats/`
Logs about user interaction with product links
### Geo location
### Locations
Location endpoints:
@@ -156,8 +169,33 @@ Location endpoints:
- `/api/v1/cities/`
## Load Data
## Product Search
Endpoint: `/api/v1/product_search/`
Query parameters:
- `query_string`: text from the search input box
Response format:
```json
{
  "filters": {
    "singles": ["tag1", "tag2"],         // for tags that aren't nested
    "entry_1": ["subtag_1", "subtag_2"], // for tree tags like entry_1/subtag_1
    "entry_2": ["subtag_1", "subtag_2"]  // one entry per penultimate tag in the tree
  },
  "products": []  // list of serialized instances, in order of relevance
}
```
Check out `products.tests.ProductSearchTest` for a practical example.
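For illustration, a minimal client call against a local development server (the host, port and example query are assumptions):
```python
import requests

# Sketch only: assumes the dev server started with `python manage.py runserver`
resp = requests.get(
    "http://localhost:8000/api/v1/product_search/",
    params={"query_string": "zapatos rojos"},
)
resp.raise_for_status()
payload = resp.json()
print(payload["filters"]["singles"])  # flat tags extracted from the results
print(len(payload["products"]))       # serialized product instances
```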
## Massive Data Load Endpoints
### COOP and Managing User Data Load
@@ -178,7 +216,6 @@ CSV headers: `id,nombre-producto,descripcion,imagen,url,precio,gastos-envio,cond
Only admin users have access to this endpoint
## GeoIP Setup
Module: `geoip2`
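Once the databases referenced by `GEOIP_PATH` are in place, a quick sanity check can be run from `python manage.py shell` (the IP address below is only an example):
```python
from django.contrib.gis.geoip2 import GeoIP2

g = GeoIP2()          # reads the databases from settings.GEOIP_PATH
g.country("8.8.8.8")  # e.g. {'country_code': 'US', 'country_name': 'United States'}
g.city("8.8.8.8")     # dict with city name, latitude/longitude, etc.
```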
@@ -196,7 +233,7 @@ Optional:
## Development Utils
### Fake product load
### Fake product data generation
To create a dataset of fake companies and products:

View File

@@ -21,9 +21,11 @@ DATABASES = {
},
}
MEDIA_ROOT = BASE_DIR + '/../media/'
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR + '/../media/'
GEOIP_PATH = BASE_DIR + '/../datasets/'
# MEDIA_ROOT = os.path.join(BASE_DIR, '/../media/')
# GEOIP_PATH = os.path.join(BASE_DIR, '/../datasets/')
# JWT SETTINGS
SIMPLE_JWT = {

View File

@@ -2,6 +2,8 @@
from django.contrib.gis.db import models
from tagulous.models import TagField
# from core.models import TreeTag
# Create your models here.

View File

@@ -4,11 +4,12 @@ from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import TreeTag
from products.models import Product
class Command(BaseCommand):
help = 'Load taxonomy terms into Tags'
help = 'Load taxonomy terms into Product.tags'
def handle(self, *args, **kwargs):
@@ -22,11 +23,11 @@ class Command(BaseCommand):
print(f"Reading from {settings.TAXONOMY_FILE}")
for line in data_file.readlines():
try:
tag = TreeTag.objects.create(name=line)
tag = Product.tags.tag_model.objects.create(name=line)
counter += 1
print('.', end='')
logging.debug(f"{tag} created from {line}")
except Exception as e:
logging.error(f"{type(e)} while creating tags from {settings.TAXONOMY_FILE}")
print(f"\n{counter} new TreeTag instances created")
print(f"\nAdded {counter} Tag objects to Product.tags")
print('Shutting down\n')

View File

@@ -1,12 +1,18 @@
import logging
import json
import shutil
from io import BytesIO
import requests
from django.core.files import File
from django.core.management.base import BaseCommand
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from faker import Faker
from PIL import Image
from companies.factories import CompanyFactory
from companies.models import Company
@@ -21,9 +27,10 @@ logging.basicConfig(
level=logging.INFO,
)
class Command(BaseCommand):
logo_url = "https://picsum.photos/200/300"
logo_url = "https://picsum.photos/300/200"
help = 'Creates fake companies and related products in database'
def handle(self, *args, **kwargs):
@@ -53,27 +60,30 @@ class Command(BaseCommand):
# create and assign products to companies
for company in new_companies:
print("Creating fake products for {company.company_name}")
logging.info(f"Creating Products for {company.company_name}")
for i in range(100):
print(f"Creating fake products for {company.company_name}")
logging.info(f"Creating fake Products for {company.company_name}")
# for i in range(100):
for i in range(10):
# make up data
name = fake.last_name_nonbinary()
description = fake.paragraph(nb_sentences=5)
# TODO: apply tags from tag list
image = None
"""
# TODO: apply automatic tags from tag list
# TODO: write image to S3 storage
response = requests.get(self.logo_url)
if response.status_code == 200:
response.raw.decode_content = True
image = response.raw.read()
else:
logging.warning(f"Got {response.status_code} querying {self.logo_url}")
"""
# create instance
product = ProductFactory(name=name, description=description)
# get image
response = requests.get(self.logo_url, stream=True)
response.raw.decode_content = True
image = Image.open(response.raw)
# save using File object
img_io = BytesIO()
image.save(img_io, format='JPEG')
product.image.save(f"{company.company_name}-{name}.jpg", File(img_io), save=False)
product.save()
product = ProductFactory(name=name, description=description, image=image)
logging.debug(f"New Product {product.name} created")
print("*", end = '.')
print('')
print("Dataset creation finished")

View File

@@ -131,3 +131,7 @@ class Command(BaseCommand):
logging.info(f"Region instances created: {region_counter}")
logging.info(f"Province instances created: {province_counter}")
logging.info(f"City instances created: {city_counter}")
print(f"Country instances created: {country_counter}")
print(f"Region instances created: {region_counter}")
print(f"Province instances created: {province_counter}")
print(f"City instances created: {city_counter}")

View File

@@ -76,6 +76,7 @@ class CustomUser(AbstractBaseUser, PermissionsMixin):
class TreeTag(TagTreeModel):
class TagMeta:
# initial = "food/eating, food/cooking, gaming/football"
initial = ""
force_lowercase = True
max_count=20
# autocomplete_view = 'myapp.views.hobbies_autocomplete'

View File

@@ -2,16 +2,11 @@ from django.contrib.gis.db import models
from tagulous.models import SingleTagField, TagField, TagTreeModel
from core.models import TreeTag
from companies.models import Company
# Create your models here.
class MyTreeTags(TagTreeModel):
class TagMeta:
initial = "colors/blue, colors/red, colors/green"
force_lowercase = True
# autocomplete_view = 'myapp.views.hobbies_autocomplete'
class Product(models.Model):
@@ -39,9 +34,9 @@ class Product(models.Model):
update_date = models.DateTimeField('Fecha de actualización de producto', null=True, blank=True)
discount = models.DecimalField('Descuento', max_digits=5, decimal_places=2, null=True, blank=True)
stock = models.PositiveIntegerField('Stock', null=True)
tags = TagField(force_lowercase=True, max_count=20, tree=True)
tags = TagField(to=TreeTag)
category = SingleTagField(null=True) # main tag category
attributes = TagField(force_lowercase=True, max_count=20, tree=True)
attributes = TagField(to=TreeTag, related_name='product_attributes')
identifiers = models.TextField('Identificador único de producto', null=True, blank=True)
# internal

View File

@@ -461,25 +461,30 @@ class ProductSearchTest(TestCase):
def test_anon_user_can_search(self):
expected_instances = [
self.factory(description="zapatos verdes"),
self.factory(tags="rojos"),
self.factory(tags="lunares/blancos",description="zapatos verdes"),
self.factory(tags="colores/rojos, tono/brillante"),
self.factory(tags="lunares/azules", description="zapatos rojos"),
self.factory(tags="lunares/rojos", description="zapatos"),
self.factory(attributes='"zapatos de campo", tono/oscuro'),
]
unexpected_instances = [
self.factory(description="chanclas"),
self.factory(tags="azules"),
]
self.factory(tags="azul")
query_string = quote("zapatos rojos")
url = f"{self.endpoint}?query_string={query_string}"
# send in request
response = self.client.get(url)
payload = response.json()
# check response
self.assertEqual(response.status_code, 200)
# check for object creation
self.assertEquals(len(response.data['products']), len(expected_instances))
self.assertEquals(len(payload['products']), len(expected_instances))
# check for filters
self.assertNotEquals([], payload['filters']['singles'])
self.assertTrue(len(payload['filters']) >= 2 )
class MyProductsViewTest(APITestCase):

View File

@@ -1,9 +1,51 @@
import logging
def extract_search_filters(result_set):
filters = set()
"""
Returned object should look something like:
{
"singles": [], # non tree tags
"entry_1": [ 'tag1', 'tag2' ],
"entry_2": [ 'tag1', 'tag2' ],
}
"""
filter_dict = {
'singles': set(),
}
for item in result_set:
tags = item.tags.all()
for tag in tags:
filters.add(tag.name)
return list(filters)
try:
# extract tags
tags = item.tags.all()
for tag in tags:
if len(tag.name.split('/')) == 1:
filter_dict['singles'].add(tag.name)
else:
# set penultimate tag as header
chunks = tag.name.split('/')
header = chunks[-2]
name = chunks[-1]
# create the header bucket if it does not exist yet
entry = filter_dict.get(header)
if entry is None:
filter_dict[header] = set()
filter_dict[header].add(name)
# extract attributes
attributes = item.attributes.all()
for tag in attributes:
if len(tag.name.split('/')) == 1:
filter_dict['singles'].add(tag.name)
else:
# set penultimate tag as header
chunks = tag.name.split('/')
header = chunks[-2]
name = chunks[-1]
# create the header bucket if it does not exist yet
entry = filter_dict.get(header)
if entry is None:
filter_dict[header] = set()
filter_dict[header].add(name)
except Exception as e:
logging.error(f'{type(e)} while extracting filters for {item}')
return filter_dict
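The two loops above apply the same bucketing rule to a tag queryset. A small helper along these lines (the name is hypothetical, not part of this diff) could serve both `tags` and `attributes` and keep the penultimate-segment logic in one place:
```python
def _bucket_tags(tag_qs, filter_dict):
    """Group flat tags under 'singles' and tree tags under their parent segment."""
    for tag in tag_qs:
        chunks = tag.name.split('/')
        if len(chunks) == 1:
            filter_dict['singles'].add(tag.name)
        else:
            # the penultimate segment becomes the filter header, the leaf the value
            filter_dict.setdefault(chunks[-2], set()).add(chunks[-1])

# inside the loop over result_set:
#     _bucket_tags(item.tags.all(), filter_dict)
#     _bucket_tags(item.attributes.all(), filter_dict)
```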

View File

@@ -155,23 +155,23 @@ def product_search(request):
chunks = query_string.split(' ')
for chunk in chunks:
# search inside name and description
products = Product.objects.filter(Q(name__icontains=chunk) | Q(description__icontains=chunk))
for item in products:
result_set.add(item)
# search in tags
products = Product.objects.filter(tags=chunk)
for item in products:
result_set.add(item)
tags = Product.tags.tag_model.objects.filter(name__icontains=chunk)
# search in category
products = Product.objects.filter(category=chunk)
for item in products:
result_set.add(item)
categories = Product.category.tag_model.objects.filter(name__icontains=chunk)
# search in attributes
products = Product.objects.filter(attributes=chunk)
for item in products:
result_set.add(item)
attributes = Product.attributes.tag_model.objects.filter(name__icontains=chunk)
# unified tag search
products_qs = Product.objects.filter(
Q(name__icontains=chunk)|
Q(description__icontains=chunk)|
Q(tags__in=tags)|
Q(category__in=categories)|
Q(attributes__in=attributes)
)
for instance in products_qs:
result_set.add(instance)
# extract filters from result_set
filters = extract_search_filters(result_set)
# serialize and respond
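The serialization step itself falls outside this hunk. One way it could look, assuming a DRF `ProductSerializer` (the name is a placeholder) and converting the filter sets into the JSON-friendly lists documented in the README:
```python
from rest_framework.response import Response

# Sketch only: ProductSerializer is a placeholder name, not confirmed by this diff
serializer = ProductSerializer(list(result_set), many=True)
return Response({
    'filters': {header: sorted(values) for header, values in filters.items()},
    'products': serializer.data,
})
```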