Skip to content

Commit

Permalink
Configure docker compose db service
Browse files Browse the repository at this point in the history
karelia parse_full_list
get_division_problems fix
PgStatTuple
atcoder account renaming
fast check linked accounts
  • Loading branch information
aropan committed Feb 5, 2024
1 parent 56a640f commit b1a747a
Show file tree
Hide file tree
Showing 39 changed files with 545 additions and 142 deletions.
6 changes: 3 additions & 3 deletions configure.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import subprocess


def generete_password(length=40):
def random_string(length=40):
return ''.join(random.choices(list(string.ascii_letters + string.digits), k=length))


Expand All @@ -25,7 +25,7 @@ def create_logger():
def enter_value(variable, old_value):
if not old_value:
logger.info(f'Generated new value for {variable} default')
old_value = generete_password()
old_value = random_string()
if old_value == '{empty}':
old_value = ''
value = input(f'Enter {variable} [default "{old_value}"]: ')
Expand Down Expand Up @@ -96,7 +96,7 @@ def main():
run_command('docker compose run dev ./manage.py migrate')

username = enter_value('username', os.getlogin())
password = enter_value('password', generete_password(10))
password = enter_value('password', random_string(10))
email = enter_value('email', 'admin@localhost')
run_command(f'''
docker compose run dev ./manage.py createadmin
Expand Down
10 changes: 7 additions & 3 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,13 @@ services:
command: >
postgres
-c max_connections=50
-c checkpoint_timeout=30min
-c checkpoint_timeout=60min
-c track_activity_query_size=4096
shm_size: 2gb
-c shared_buffers=2GB
-c effective_cache_size=4GB
-c work_mem=80MB
-c maintenance_work_mem=1GB
shm_size: 7gb
restart: always
nginx:
build:
Expand All @@ -93,7 +97,7 @@ services:
ports:
- 80:80
- 443:443
command: "/bin/sh -c 'while :; do sleep 6h & wait $${!}; nginx -s reload; done & nginx -g \"daemon off;\"'"
command: "/bin/sh -c 'while :; do logrotate /etc/logrotate.conf; sleep 6h & wait $${!}; nginx -s reload; done & nginx -g \"daemon off;\"'"
restart: always
certbot:
image: certbot/certbot:latest
Expand Down
95 changes: 72 additions & 23 deletions legacy/module/karelia.snarknews.info/index.php
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
}

$base_url = $match['url'];
for ($iter = 0; ; ++$iter) {
$base_url = str_replace('&', '&', $base_url);
$n_skipped = 0;
for ($iter = 0;; ++$iter) {
if ($iter) {
if (!isset($_GET['parse_full_list'])) {
break;
Expand All @@ -17,7 +19,7 @@
function ($match) {
return ($match[2] == 'w'? $match[1] - 1 . 's' : $match[1] . 'w');
},
$url,
$base_url,
);
}

Expand All @@ -29,22 +31,24 @@ function ($match) {
break;
}

$schedule_url = "http://camp.icpc.petrsu.ru/{$query['sbname']}/schedule";
$camp = $query['sbname'];

$schedule_url = "http://camp.icpc.petrsu.ru/{$camp}/schedule";
if (DEBUG) {
echo "schedule url = $schedule_url\n";
}

$page = curlexec($schedule_url);
preg_match_all('#<b>(?P<date>[^<]*)</b>([^<]*</?(?:p|br)\s*/?>)*\s*(?:<span[^>]*>)?(?P<start_time>[0-9]+:[0-9]+)([-–\s]*(?P<end_time>[0-9]+:[0-9]+))?[-–\s]*(?:<a[^>]*>)?[^<]*(?:Contest|[A-Za-z\s]*\sRound|[A-Za-z\s]*\scontest)\s*(?:[0-9]+\s*[<(]|[^<]*</a>)#s', $page, $schedule, PREG_SET_ORDER);

if (!count($schedule) || !preg_match('/[a-z]*[0-9]{4}[a-z]*/', $query['sbname'], $year)) {
if (!preg_match('/[a-z]*(?P<year>[0-9]{4})[a-z]*/', $camp, $year)) {
if ($iter) {
break;
}
continue;
}

$year = $year[0];
$year = $year['year'];

foreach ($schedule as &$s) {
$s['date'] = preg_replace('/\s*day\s*[0-9]+|\([^\)]*\)|,/', '', $s['date']);
Expand All @@ -54,35 +58,48 @@ function ($match) {
unset($s);

if (parse_url($url, PHP_URL_HOST) == "") {
$url = 'http://' . parse_url($URL, PHP_URL_HOST) . "/" . $url;
$url = url_merge($URL, $url);
}
$page = curlexec($url);
$camp_url = $url;
$page = curlexec($camp_url);
$page = str_replace('&nbsp;', ' ', $page);
preg_match_all('#<a[^>]*href="(?P<url>[^"]*)"[^>]*>(?:\s*<[^/>]*(?:title="(?P<title>[^"]*)">)?)*\s*Day\s*(?P<day>0[0-9]+)\s*<#s', $page, $matches);
preg_match_all('#<a[^>]*href="(?P<url>[^"]*)"[^>]*>(?:\s*<[^/>]*(?:title="(?P<title>[^"]*)">)?)*\s*Day\s*(?P<day>0[0-9]+)\s*<#s', $page, $matches, PREG_SET_ORDER);

unset($prev_date);
if (empty($schedule) && empty($matches)) {
$n_skipped += 1;
if ($n_skipped > 3) {
break;
} else {
continue;
}
}
$n_skipped = 0;

$days = array();
foreach ($matches[0] as $i => $value)
$HOUR = 60 * 60;
$DAY = 24 * $HOUR;
foreach ($matches as $i => $values)
{
$url = $matches['url'][$i];
$url = $values['url'];
$url = str_replace('&amp;', '&', $url);
$page = curlexec($url);

$data = array();
$data['url'] = $url;
if (isset($matches['title'][$i])) {
$data['title'] = $matches['title'][$i];
if (!empty($values['title'])) {
$data['title'] = $values['title'];
}
if (preg_match('#<h2>(?P<title>[^,]*)(?:, (?P<date>[0-9]+\s+[^<]*))?</h2>#', $page, $match)) {
if (!isset($data['title'])) {
if (preg_match('#<h[23]>(?P<title>[^,]+)(?:,\s*(?P<date>[^<]*\b[0-9]+\b[^<]*))?</h[23]>#', $page, $match)) {
if (!isset($data['title']) || preg_match('#Contest\s*[0-9]+#', $data['title'])) {
$data['title'] = $match['title'];
}
if (isset($match['date'])) {
$data['date'] = preg_replace('#^.*,\s*([^,]*,[^,]*)$#', '\1', $match['date']);
$date = preg_replace('#^.*,\s*([^,]*,[^,]*)$#', '\1', $match['date']);
if (strtotime($date) !== false) {
$data['date'] = $date;
}
}
}
if (!isset($data['title'])) {
continue;
}

if ($i < count($schedule)) {
$s = $schedule[$i];
Expand All @@ -93,9 +110,18 @@ function ($match) {
if (isset($s['end_time'])) {
$data['end_time'] = $s['end_time'];
}
} else {
$data['start_time'] = '10:00';
if (empty($schedule) && !isset($data['date'])) {
$season = substr($camp, -1);
if ($season == 'w') {
$data['date'] = strftime('%B %d, %Y', strtotime("$year-02-27") + ($i - count($matches) + 1) * $DAY);
} else if ($season == 's') {
$data['date'] = strftime('%B %d, %Y', strtotime("$year-08-30") + ($i - count($matches) + 1) * $DAY);
}
}
}

$days[intval($matches['day'][$i])] = $data;
$days[intval($values['day'])] = $data;
}
foreach ($days as $day => $data) {
if (!isset($data['date']) && isset($days[$day - 1]) && isset($days[$day - 1]['date'])) {
Expand All @@ -108,6 +134,8 @@ function ($match) {
}
}

$camp_start_time = null;
$camp_end_time = null;
foreach ($days as $day => $data) {
if (!isset($data['date'])) {
continue;
Expand All @@ -124,8 +152,16 @@ function ($match) {
echo $title . ' | ' . $date . "\n";
}

$key = $camp . '-day-' . $day;

$start_time = isset($data['start_time'])? $date . ' ' . $data['start_time'] : $date;
if (empty($camp_start_time)) {
$camp_start_time = $start_time;
}
$camp_end_time = strtotime($start_time) + 2 * $DAY;

$contests[] = array(
'start_time' => isset($data['start_time'])? $date . ' ' . $data['start_time'] : $date,
'start_time' => $start_time,
'end_time' => isset($data['end_time'])? $date . ' ' . $data['end_time'] : '',
'duration' => isset($data['end_time'])? '' : (isset($data['start_time'])? '05:00' : '00:00'),
'title' => $title,
Expand All @@ -134,8 +170,21 @@ function ($match) {
'host' => $HOST,
'rid' => $RID,
'timezone' => $TIMEZONE,
'key' => $date,
'key' => $key,
);
}

$contests[] = array(
'start_time' => $camp_start_time,
'end_time' => $camp_end_time,
'title' => "Petrozavodsk Programming Camp $camp",
'url' => $camp_url,
'standings_url' => $camp_url,
'host' => $HOST,
'rid' => $RID,
'timezone' => $TIMEZONE,
'key' => $camp,
'info' => array('series' => 'ptzcamp'),
);
}
?>
25 changes: 22 additions & 3 deletions src/clist/templatetags/extras.py
Original file line number Diff line number Diff line change
Expand Up @@ -306,18 +306,38 @@ def slug(value):
return slugify(unidecode(value)).strip('-')


def get_standings_divisions_order(contest):
    """Return the ordered list of division keys for a contest's standings.

    Precedence:
      1. explicit ``divisions_order`` stored inside ``info['problems']``,
      2. otherwise the sorted keys of ``info['problems']['division']``,
      3. otherwise a top-level ``info['divisions_order']``,
      4. otherwise an empty list (contest has no divisions).
    """
    problems = contest.info.get('problems', {})
    if 'division' in problems:
        # Reuse the already-fetched ``problems`` dict for the fallback instead
        # of re-reading contest.info['problems'] a second time.
        divisions_order = list(problems.get('divisions_order', sorted(problems['division'].keys())))
    elif 'divisions_order' in contest.info:
        divisions_order = contest.info['divisions_order']
    else:
        divisions_order = []
    return divisions_order


@register.filter
def get_division_problems(contest, info):
    """Return the list of problems relevant to one statistic row.

    For contests with per-division problem sets, collects problems from the
    row's own division first (``info['division']`` and any divisions found in
    ``info['_division_addition']``), then from the remaining divisions in the
    contest's standings order, de-duplicating by problem key so a problem
    shared between divisions appears once.

    Falls back to the raw ``problems`` structure when no division-specific
    problems were collected.

    Note: the body references ``contest``, so the parameter must be the
    contest object (post-rename signature), not the problems dict.
    """
    problems = contest.info.get('problems', [])
    ret = []
    seen_keys = set()
    if 'division' in problems:
        division_addition = info.get('_division_addition')
        divisions = list(division_addition.keys()) if division_addition else []
        # The row's own division goes first so its problems take precedence.
        division = info.get('division')
        if division and division not in divisions:
            divisions = [division] + divisions
        # Append any remaining divisions in the contest's canonical order.
        for division in get_standings_divisions_order(contest):
            if division not in divisions:
                divisions.append(division)
        for division in divisions:
            if division in problems['division']:
                for problem in problems['division'][division]:
                    problem_key = get_problem_key(problem)
                    if problem_key in seen_keys:
                        continue
                    seen_keys.add(problem_key)
                    ret.append(problem)
    return ret or problems

Expand Down Expand Up @@ -387,8 +407,7 @@ def get_problem_solution(problem):
ret = {}
for contest in problem.contests.all():
for statistic in contest.statistics_set.all():
problems = contest.info.get('problems', [])
problems = get_division_problems(problems, statistic.addition)
problems = get_division_problems(contest, statistic.addition)
group_scores = defaultdict(int)

for p in problems:
Expand Down
6 changes: 3 additions & 3 deletions src/clist/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -945,7 +945,7 @@ def problems(request, template='problems.html'):
'key': {'fields': ['key__iexact']},
'contest': {'fields': ['contest__title__iregex'], 'exists': 'contests'},
'resource': {'fields': ['resource__host__iregex']},
'tag': {'fields': ['problemtag__name__iregex'], 'exists': 'tags'},
'tag': {'fields': ['problemtag__name'], 'exists': 'tags'},
'cid': {'fields': ['contest__pk'], 'exists': 'contests', 'func': lambda v: int(v)},
'rid': {'fields': ['resource_id'], 'func': lambda v: int(v)},
'pid': {'fields': ['id'], 'func': lambda v: int(v)},
Expand Down Expand Up @@ -1019,8 +1019,8 @@ def problems(request, template='problems.html'):

tags = [r for r in request.GET.getlist('tag') if r]
if tags:
problems = problems.annotate(has_tag=Exists('tags', filter=Q(problemtag__pk__in=tags)))
problems = problems.filter(has_tag=True)
for tag in tags:
problems = problems.filter(tags__pk=tag)
tags = list(ProblemTag.objects.filter(pk__in=tags))

custom_fields = [f for f in request.GET.getlist('field') if f]
Expand Down
10 changes: 9 additions & 1 deletion src/logify/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from django.urls import reverse
from django.utils.html import format_html

from logify.models import EventLog
from logify.models import EventLog, PgStatTuple
from pyclist.admin import BaseModelAdmin, admin_register


Expand All @@ -21,3 +21,11 @@ def related_object_link(self, obj):
return format_html('<a href="{}">{}</a>', url, obj.related)

related_object_link.short_description = 'Related Object'


@admin_register(PgStatTuple)
class PgStatTupleAdmin(BaseModelAdmin):
    """Admin view over pgstattuple snapshots.

    Rows are produced by the ``update_pgstattuple`` management command, which
    stores one record per public table with its bloat statistics.
    """
    list_display = ['id', 'table_name', 'app_name', 'table_len', 'tuple_percent', 'dead_tuple_percent', 'free_percent']
    list_filter = ['app_name']
    search_fields = ['table_name']
    # Largest tables first — the ones most worth inspecting for bloat.
    ordering = ['-table_len']
8 changes: 8 additions & 0 deletions src/logify/apps.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
from django.apps import AppConfig

from pyclist.decorators import run_once, run_only_in_production


class LogifyConfig(AppConfig):
    """App config for ``logify``.

    On startup (production only, once per process group) marks event logs that
    were left IN_PROGRESS by a previous run as INTERRUPTED.
    """

    default_auto_field = 'django.db.models.BigAutoField'
    name = 'logify'

    @run_only_in_production
    @run_once('logify_ready')
    def ready(self):
        # Imported lazily: models may not be loaded when the module is imported.
        from logify.models import EventLog, EventStatus

        # Anything still IN_PROGRESS was cut off by a restart or crash.
        stale_events = EventLog.objects.filter(status=EventStatus.IN_PROGRESS)
        stale_events.update(status=EventStatus.INTERRUPTED)
42 changes: 42 additions & 0 deletions src/logify/management/commands/update_pgstattuple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
#!/usr/bin/env python3

import re
from logging import getLogger

import tqdm
from django.core.management.base import BaseCommand
from django.db import connection

from logify.models import PgStatTuple
from utils.attrdict import AttrDict
from utils.db import dictfetchone, find_app_by_table


class Command(BaseCommand):
    """Refresh the PgStatTuple table from the pgstattuple extension.

    Iterates over the public-schema tables, runs ``pgstattuple`` on each and
    upserts the resulting statistics (plus the owning Django app, resolved via
    ``find_app_by_table``) into ``PgStatTuple``.
    """

    help = 'Updates the PgStatTuple table with fresh data from pgstattuple'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.logger = getLogger('logify.update_pgstattuple')

    def add_arguments(self, parser):
        parser.add_argument('-n', '--limit', type=int, help='number of tables')
        parser.add_argument('-f', '--search', type=str, help='search tables')

    def handle(self, *args, **options):
        self.stdout.write(str(options))
        args = AttrDict(options)

        with connection.cursor() as cursor:
            cursor.execute("SELECT tablename FROM pg_tables WHERE schemaname='public'")
            tables = [row[0] for row in cursor.fetchall()]
            # Filter first, then limit: otherwise --limit truncates the table
            # list to an arbitrary prefix before --search ever sees it.
            if args.search:
                tables = [table for table in tables if re.search(args.search, table)]
            if args.limit:
                tables = tables[:args.limit]

            for table in tqdm.tqdm(tables, desc='tables'):
                # Parameterized call instead of f-string interpolation:
                # pgstattuple accepts a text/regclass argument, so the table
                # name can be passed as a bind parameter.
                cursor.execute("SELECT * FROM pgstattuple(%s)", [table])
                stats = dictfetchone(cursor)
                defaults = {'app_name': find_app_by_table(table), **stats}
                PgStatTuple.objects.update_or_create(table_name=table, defaults=defaults)
Loading

0 comments on commit b1a747a

Please sign in to comment.