Commit 8b97379e authored by Piotr Maślanka

fix everything, prepare for deployment

parent 3cbd247a
Pipeline #52555 passed with stages in 1 minute and 50 seconds
@@ -15,16 +15,10 @@ unittest:
     - docker-compose down
 
-build_nginx:
-  script:
-    - DOCKERIT_NO_BRANCH=1 docker-it nginx zoo.smok.co/henrietta/netguru/nginx deployment/nginx
-  only:
-    - master
-
 build:
   stage: build
   script:
-    - DOCKERIT_NO_BRANCH=1 docker-it netguru zoo.smok.co/henrietta/netguru/backend . --target runtime
+    - DOCKERIT_NO_BRANCH=1 docker-it netguru zoo.smok.co/henrietta/netguru .
 deploy:
   stage: deploy
...
@@ -21,7 +21,6 @@ I know that I'm ignoring specification, and you are free to call me out
 on that - but as I got some experience with frontend,
 I'd rather do **the right thing**.
-
 ## Documentation
 I couldn't get the DRF documentation to cooperate with me (most
@@ -54,7 +53,7 @@ I realize that it would be best practice to deduplicate some code contained
 within tests, but since I'm running about 1.5 full-time equivalents you just have to forgive me
 for sparing the effort to do so. Thanks "from the mountain!"
-# The [reaper job](counting/cron.py)
+# The [reaper job](counting/cron.py#L27)
 I came up with the reaper job trying to think up a reasonable solution
 that wouldn't load the processing server too much. It processes way-past-due
@@ -62,7 +61,7 @@ links into a nice compact representation and stores them in the database.
 Two last days (since the job fires only every 24 hours) are computed on-the-fly,
 and cached as necessary, for up to 5 minutes.
-# Nginx serving static content
+# Nginx not serving static content
 Since it's not ideal for Django to be serving large static files,
 I tried to optimize it as much as possible by using a streaming iterator.
...
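The README excerpt above names two techniques whose code is not part of this commit: streaming large downloads straight from Django, and computing the two most recent days of history on the fly behind a short-lived cache. A minimal sketch of both, assuming hypothetical names (`share.path` and `compute_stats` are stand-ins; only `Share` appears in the diff):

```python
from django.core.cache import cache
from django.http import FileResponse


def download(request, share_id):
    """Serve a stored file without reading it fully into memory."""
    share = Share.objects.get(id=share_id)  # Share comes from the diff; .path is assumed
    # FileResponse wraps the file object in a streaming iterator,
    # sending it to the client chunk by chunk
    return FileResponse(open(share.path, 'rb'), as_attachment=True)


def stats_for_day(day):
    """Recompute a day's statistics at most once per 5 minutes."""
    # compute_stats is a hypothetical stand-in for the real aggregation
    return cache.get_or_set(f'stats-{day.isoformat()}',
                            lambda: compute_stats(day),
                            timeout=300)
```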
@@ -4,7 +4,7 @@ from django.db import models
 class UserAgentStorage(models.Model):
     user = models.ForeignKey('auth.User', verbose_name='User', db_index=True,
                              on_delete=models.CASCADE)
-    ua = models.TextField(verbose_name='User agent', null=True, blank=True)
+    ua = models.TextField(verbose_name='User agent')
 
     def __str__(self):
         return f'{self.user} - {self.ua}'
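Dropping `null=True, blank=True` makes `ua` mandatory, which is sound only if every writer of `UserAgentStorage` always supplies a value. A sketch of what such a writer might look like (the view hook is hypothetical; only the model is in the diff):

```python
def record_user_agent(request):
    # One row per user; HTTP_USER_AGENT can be missing from the request,
    # so fall back to '' now that the column is non-nullable
    UserAgentStorage.objects.update_or_create(
        user=request.user,
        defaults={'ua': request.META.get('HTTP_USER_AGENT', '')},
    )
```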
...
@@ -27,10 +27,8 @@ class DayType(enum.IntEnum):
 class ReaperJob(CronJobBase):
     """
     Reaper's job is to collect dead links within the database and compile a history out of them.
-    It will delete only links from previous days, only if they have already expired.
-
-    Let's talk a moment about its logic - days can be divided into one of 3 categories
-    displayed above in DayType
+    It will delete only links from previous days, and only for days that have all of their
+    links expired.
     """
     RUN_EVERY_MINS = 24 * 60  # once each day
@@ -46,6 +44,7 @@ class ReaperJob(CronJobBase):
             StoryOfADay.objects.get(date=date)
             return DayType.DAY_WITH_HISTORY
         except StoryOfADay.DoesNotExist:
+            # this is going to do the right thing even if the iterator's empty
             if all(item.expired for item in Share.objects.get_for_day(date)):
                 return DayType.DAY_UNPROCESSED_BUT_PROCESSABLE
             else:
@@ -65,11 +64,10 @@
                     files += 1
                 else:
                     links += 1
-            sod = StoryOfADay(day=cur_day, links=links,
-                              files=files)
-            logger.info('Historic info for %s compiled, %s files visited, %s links visited',
-                        cur_day, files, links)
-            entries_compiled.runtime(+1)
-            sod.save()
-            share.delete()
+            sod = StoryOfADay(day=cur_day, links=links, files=files)
+            logger.info('Historic info for %s compiled, %s files visited, %s links visited',
+                        cur_day, files, links)
+            entries_compiled.runtime(+1)
+            sod.save()
+            share.delete()
             cur_day = cur_day + datetime.timedelta(days=1)
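The comment added in the middle hunk relies on a Python guarantee: `all()` over an empty iterable is vacuously `True`, so a day with no shares at all is classified as processable instead of falling into the `else` branch:

```python
>>> all([])
True
>>> all(item.expired for item in [])  # a day with no shares behaves the same way
True
```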
...
-FROM nginx
-ADD conf.d/ /etc/nginx/conf.d/
...
@@ -152,7 +152,6 @@ REST_FRAMEWORK = {
     'UNICODE_JSON': True,
 }
 
-
 # Configure tracing
 # =================
 OPENTRACING_TRACE_ALL = True
...
+# Generated by Django 3.2.6 on 2021-08-26 23:49
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('shares', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='share',
+            name='resource',
+            field=models.TextField(
+                help_text='URL if this is an URL. Otherwise it will be name of the file as it is on filesystem, then a dot, and then the file name the user submitted it as',
+                verbose_name='Resource'),
+        ),
+    ]
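The new `help_text` documents the on-disk format of `resource`: either a URL, or the filesystem name, a dot, and the user-submitted file name. Assuming the filesystem name itself never contains a dot (the migration doesn't promise this), the two parts can be recovered with a single split; a hypothetical helper:

```python
def split_resource(resource: str) -> tuple[str, str]:
    # '<name on filesystem>.<name the user uploaded it as>'
    # assumes the filesystem part is dot-free, e.g. a UUID hex string
    fs_name, user_name = resource.split('.', 1)
    return fs_name, user_name
```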
...
@@ -75,7 +75,8 @@ class TestShares(TestCase):
         share = Share.objects.get(id=response.json()['url'].rsplit('/', 1)[-1])
         share.created_on = datetime.datetime.now() - datetime.timedelta(days=2)
         share.save()
-        response = self.api_client.post(f'http://127.0.0.1/api/get/{share.id}', {'password': response.json()['password']},
-                                        format='json')
+        response = self.api_client.post(f'http://127.0.0.1/api/get/{share.id}',
+                                        {'password': response.json()['password']},
+                                        format='json')
         self.assertEqual(response.status_code, 404)
...
@@ -2,5 +2,4 @@
 set -e
 
 sleep 3  # wait for postgres to start up
-python manage.py makemigrations shares
 python manage.py test --no-input
...