Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package MirrorCache for openSUSE:Factory checked in at 2024-12-20 15:28:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/MirrorCache (Old)
 and      /work/SRC/openSUSE:Factory/.MirrorCache.new.1881 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "MirrorCache"

Fri Dec 20 15:28:37 2024 rev:48 rq:1232817 version:1.089

Changes:
--------
--- /work/SRC/openSUSE:Factory/MirrorCache/MirrorCache.changes 2024-12-10 23:43:41.732961887 +0100
+++ /work/SRC/openSUSE:Factory/.MirrorCache.new.1881/MirrorCache.changes 2024-12-20 15:29:04.332195413 +0100
@@ -1,0 +2,8 @@
+Fri Dec 13 11:29:33 UTC 2024 - Andrii Nikitin <[email protected]>
+
+- Update to version 1.089:
+  * Fix presense in header x-media-version when empty (#548)
+  * Add /app/efficiency reporting cache hit rate (#547)
+  * Do not verify redirects of unversioned files on HEAD requests (#546)
+
+-------------------------------------------------------------------

Old:
----
  MirrorCache-1.088.obscpio

New:
----
  MirrorCache-1.089.obscpio

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ MirrorCache.spec ++++++
--- /var/tmp/diff_new_pack.mBzrj3/_old 2024-12-20 15:29:06.084267605 +0100
+++ /var/tmp/diff_new_pack.mBzrj3/_new 2024-12-20 15:29:06.096268100 +0100
@@ -22,7 +22,7 @@
 %define main_requires %{assetpack_requires} perl(Carp) perl(DBD::Pg) >= 3.7.4 perl(DBI) >= 1.632 perl(DBIx::Class) >= 0.082801 perl(DBIx::Class::DynamicDefault) perl(DateTime) perl(Encode) perl(Time::Piece) perl(Time::Seconds) perl(Time::ParseDate) perl(DateTime::Format::Pg) perl(Exporter) perl(File::Basename) perl(LWP::UserAgent) perl(Mojo::Base) perl(Mojo::ByteStream) perl(Mojo::IOLoop) perl(Mojo::JSON) perl(Mojo::Pg) perl(Mojo::URL) perl(Mojo::Util) perl(Mojolicious::Commands) perl(Mojolicious::Plugin) perl(Mojolicious::Plugin::RenderFile) perl(Mojolicious::Static) perl(Net::OpenID::Consumer) perl(POSIX) perl(Sort::Versions) perl(URI::Escape) perl(XML::Writer) perl(base) perl(constant) perl(diagnostics) perl(strict) perl(warnings) shadow rubygem(sass) perl(Net::DNS) perl(LWP::Protocol::https) perl(Digest::SHA) perl(Config::IniFiles)
 %define build_requires %{assetpack_requires} rubygem(sass) tidy sysuser-shadow sysuser-tools
 Name: MirrorCache
-Version: 1.088
+Version: 1.089
 Release: 0
 Summary: WebApp to redirect and manage mirrors
 License: GPL-2.0-or-later
++++++ MirrorCache-1.088.obscpio -> MirrorCache-1.089.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/assets/assetpack.def new/MirrorCache-1.089/assets/assetpack.def
--- old/MirrorCache-1.088/assets/assetpack.def 2024-11-28 15:24:24.000000000 +0100
+++ new/MirrorCache-1.089/assets/assetpack.def 2024-12-13 12:17:01.000000000 +0100
@@ -133,6 +133,7 @@
 < https://cdnjs.cloudflare.com/ajax/libs/jquery-ujs/1.2.1/rails.js
 < javascripts/disable_animations.js [mode==test]
 < https://raw.githubusercontent.com/sorich87/bootstrap-tour/6a1028fb562f9aa68c451f0901f8cfeb43cad140/build/js/bootstrap-tour.min.js
+< https://cdn.plot.ly/plotly-basic-2.35.2.min.js

 ! navigation.css
 < stylesheets/navigation.scss
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/dist/rpm/update-cache.sh new/MirrorCache-1.089/dist/rpm/update-cache.sh
--- old/MirrorCache-1.088/dist/rpm/update-cache.sh 2024-11-28 15:24:24.000000000 +0100
+++ new/MirrorCache-1.089/dist/rpm/update-cache.sh 2024-12-13 12:17:01.000000000 +0100
@@ -1,20 +1,24 @@
-#!
/bin/sh +#!/usr/bin/bash + +# requirements: +# bzip2 cpio perl-IO-Socket-SSL perl-Mojolicious +# perl-Mojolicious-Plugin-AssetPack ruby3.3-rubygem-sass set -e export LC_ALL='en_US.UTF-8' export LANG='en_US.UTF-8' -osc up -rm -f _service\:* -rm -f *.tar *.cpio -osc service lr -# special call for tar buildtime service -osc service lr tar - -SD=$PWD -cd MirrorCache -tools/generate-packed-assets -tar cvjf ../cache.tar.xz assets/cache assets/assetpack.db -cd "$SD" -osc up +mkdir -p MirrorCache-update-cache +rm -rf MirrorCache-update-cache/* +pushd MirrorCache-update-cache + +cpio -id < ../MirrorCache-*.obscpio +pushd MirrorCache-* + +./tools/generate-packed-assets +tar cvjf ../../cache.tar.xz assets/cache assets/assetpack.db + +popd +popd +rm -rf MirrorCache-update-cache/* diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/Schema/ResultSet/Stat.pm new/MirrorCache-1.089/lib/MirrorCache/Schema/ResultSet/Stat.pm --- old/MirrorCache-1.088/lib/MirrorCache/Schema/ResultSet/Stat.pm 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/Schema/ResultSet/Stat.pm 2024-12-13 12:17:01.000000000 +0100 @@ -277,4 +277,134 @@ return $prev_stat_id; } + +my $SQLEFFICIENCY_HOURLY_PG = <<"END_SQL"; +select +extract(epoch from now())::integer as dt, +hit_minute + coalesce(hit,0) as hit, +miss_minute + coalesce(miss,0) as miss, +pass_minute + coalesce(pass,0) as pass, +geo_minute + coalesce(geo,0) as geo, +bot_minute + coalesce(bot,0) as bot +from +( +select +sum(case when mirror_id > 0 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as hit_minute, +sum(case when mirror_id = -1 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as miss_minute, +sum(case when mirror_id = 0 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as pass_minute, +sum(case when mirror_id < -1 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as geo_minute, +sum(case when (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as bot_minute +from ( +select lastdt from (select dt as lastdt from stat_agg where period = 'minute' order by dt desc limit 1) x union select CURRENT_TIMESTAMP(3) - interval '1 hour' limit 1 +) lastagg join stat on dt > lastdt +) agg_minute +left join +( +select +sum(case when mirror_id > 0 then hit_count else 0 end) as hit, +sum(case when mirror_id = -1 then hit_count else 0 end) as miss, +sum(case when mirror_id = 0 then hit_count else 0 end) as pass, +sum(case when mirror_id < -1 and mirror_id != -100 then hit_count else 0 end) as geo, +sum(case when mirror_id = -100 then hit_count else 0 end) as bot +from stat_agg +where +period = 'minute' +and dt <= (select dt from stat_agg where period = 'minute' order by dt desc limit 1) +and dt > date_trunc('hour', CURRENT_TIMESTAMP(3)) +) agg_hour on 1=1 +union +select extract(epoch from dt)::integer, +sum(case when mirror_id > 0 then hit_count else 0 end) as hit, +sum(case when mirror_id = -1 then hit_count else 0 end) as miss, +sum(case when mirror_id = 0 then hit_count else 0 end) as pass, +sum(case when mirror_id < -1 and mirror_id != -100 then hit_count else 0 end) as geo, +sum(case when mirror_id = -100 then hit_count else 0 end) as bot +from stat_agg +where +period = 'hour' +and dt <= date_trunc('hour', CURRENT_TIMESTAMP(3)) and dt > CURRENT_TIMESTAMP(3) - interval '30 hour' +group by dt +order by 1 desc +limit 30 +END_SQL + + + +my $SQLEFFICIENCY_DAILY_PG = <<"END_SQL"; +select +extract(epoch from now())::integer as dt, +sum(hit) as hit, +sum(miss) as miss, 
+sum(pass) as pass, +sum(geo) as geo, +sum(bot) as bot +from +( +select +sum(case when mirror_id > 0 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as hit, +sum(case when mirror_id = -1 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as miss, +sum(case when mirror_id = 0 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as pass, +sum(case when mirror_id < -1 and not (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as geo, +sum(case when (lower(agent) ~ '$BOT_MASK') then 1 else 0 end) as bot +from ( +select lastdt from (select dt as lastdt from stat_agg where period = 'hour' order by dt desc limit 1) x union select date_trunc('day', CURRENT_TIMESTAMP(3)) limit 1 +) lastagg join stat on dt > lastdt +union +select +sum(case when mirror_id > 0 then hit_count else 0 end) as hit, +sum(case when mirror_id = -1 then hit_count else 0 end) as miss, +sum(case when mirror_id = 0 then hit_count else 0 end) as pass, +sum(case when mirror_id < -1 and mirror_id != -100 then hit_count else 0 end) as geo, +sum(case when mirror_id = -100 then hit_count else 0 end) as bot +from stat_agg +where +period = 'hour' +and dt <= (select dt from stat_agg where period = 'hour' order by dt desc limit 1) +and dt > date_trunc('day', CURRENT_TIMESTAMP(3)) +group by dt +) heute +union +select extract(epoch from dt)::integer, +sum(case when mirror_id > 0 then hit_count else 0 end) as hit, +sum(case when mirror_id = -1 then hit_count else 0 end) as miss, +sum(case when mirror_id = 0 then hit_count else 0 end) as pass, +sum(case when mirror_id < -1 and mirror_id != -100 then hit_count else 0 end) as geo, +sum(case when mirror_id = -100 then hit_count else 0 end) as bot +from stat_agg +where +period = 'day' +and dt <= date_trunc('day', CURRENT_TIMESTAMP(3)) and dt > CURRENT_TIMESTAMP(3) - 30 * 24 * interval '1 hour' +group by dt +order by 1 desc +limit 30 +END_SQL + + +sub select_efficiency() { + my ($self, $period, $limit) = @_; + + my $sql; + my $dbh = $self->result_source->schema->storage->dbh; + + $sql = $SQLEFFICIENCY_HOURLY_PG; + $sql = $SQLEFFICIENCY_DAILY_PG if $period eq 'day'; + + if ($dbh->{Driver}->{Name} ne 'Pg') { + $sql =~ s/date_trunc\('day', CURRENT_TIMESTAMP\(3\)\)/date(CURRENT_TIMESTAMP(3))/g; + $sql =~ s/date_trunc\('hour', CURRENT_TIMESTAMP\(3\)\)/CURDATE() + INTERVAL hour(now()) HOUR/g; + $sql =~ s/ ~ / REGEXP /g; + $sql =~ s/30 \* 24 \* interval '1 hour'/interval 30 day/g; + $sql =~ s/interval 'hour'/interval 1 hour/g; + $sql =~ s/interval '1 hour'/interval 1 hour/g; + $sql =~ s/interval '30 hour'/interval 30 hour/g; + $sql =~ s/interval 'day'/interval 1 day/g; + $sql =~ s/extract\(epoch from now\(\)\)::integer/floor(unix_timestamp(now()))/g; + $sql =~ s/extract\(epoch from dt\)::integer/floor(unix_timestamp(dt))/g; + } + my $prep = $dbh->prepare($sql); + $prep->execute(); + my $arrayref = $dbh->selectall_arrayref($prep, { Slice => {} }); + return $arrayref; +} + 1; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Controller/App/Efficiency.pm new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Controller/App/Efficiency.pm --- old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Controller/App/Efficiency.pm 1970-01-01 01:00:00.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Controller/App/Efficiency.pm 2024-12-13 12:17:01.000000000 +0100 @@ -0,0 +1,26 @@ +# Copyright (C) 2024 SUSE LLC +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General 
Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, see <http://www.gnu.org/licenses/>. + +package MirrorCache::WebAPI::Controller::App::Efficiency; +use Mojo::Base 'MirrorCache::WebAPI::Controller::App::Table'; + +sub index { + my $c = shift; + + $c->render('app/efficiency/index'); +} + + +1; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Controller/Rest/Efficiency.pm new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Controller/Rest/Efficiency.pm --- old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Controller/Rest/Efficiency.pm 1970-01-01 01:00:00.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Controller/Rest/Efficiency.pm 2024-12-13 12:17:01.000000000 +0100 @@ -0,0 +1,50 @@ +# Copyright (C) 2021 SUSE LLC +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, see <http://www.gnu.org/licenses/>. + +package MirrorCache::WebAPI::Controller::Rest::Efficiency; +use Mojo::Base 'Mojolicious::Controller'; +use Mojo::Promise; +use Data::Dumper; + +sub list { + my ($self) = @_; + + my $period = $self->param('period') // 'hour'; + my $limit = 30; + + my $tx = $self->render_later->tx; + + my $rendered; + my $handle_error = sub { + return if $rendered; + $rendered = 1; + my @reason = @_; + my $reason = scalar(@reason)? 
Dumper(@reason) : 'unknown'; + $self->render(json => {error => $reason}, status => 500) ; + }; + + my $res; + my $p = Mojo::Promise->new->timeout(5); + $p->then(sub { + my $rs = $self->schema->resultset('Stat'); + $res = $rs->select_efficiency($period, $limit); + })->catch($handle_error)->then(sub { + $self->render(json => $res); + })->catch($handle_error); + + $p->resolve; +} + +1; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Plugin/Dir.pm new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Plugin/Dir.pm --- old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Plugin/Dir.pm 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Plugin/Dir.pm 2024-12-13 12:17:01.000000000 +0100 @@ -707,7 +707,7 @@ } else { $media_version = join(',', sort @versions); } - $c->res->headers->add('X-MEDIA-VERSION' => $media_version); + $c->res->headers->add('X-MEDIA-VERSION' => $media_version) if $media_version; } sub _render_dir_local { diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm --- old/MirrorCache-1.088/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm 2024-12-13 12:17:01.000000000 +0100 @@ -396,7 +396,7 @@ return 1; } - unless ($dm->pedantic) { + if (!$dm->pedantic || ($dm->is_head && $dm->pedantic < 2)) { # Check below is needed only when MIRRORCACHE_ROOT_COUNTRY is set # only with remote root and when no mirrors should be used for the root's country if ($country ne $mirror->{country} && $dm->root_is_better($mirror->{region}, $mirror->{lng})) { diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/WebAPI.pm new/MirrorCache-1.089/lib/MirrorCache/WebAPI.pm --- old/MirrorCache-1.088/lib/MirrorCache/WebAPI.pm 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/WebAPI.pm 2024-12-13 12:17:01.000000000 +0100 @@ -227,6 +227,7 @@ $rest_r->get('/myip')->name('rest_myip')->to('my_ip#show') if $self->_geodb; $rest_r->get('/stat')->name('rest_stat')->to('stat#list'); + $rest_r->get('/efficiency')->name('rest_efficiency')->to('efficiency#list'); my $report_r = $r->any('/report')->to(namespace => 'MirrorCache::WebAPI::Controller::Report'); $report_r->get('/mirror')->name('report_mirror')->to('mirror#index'); @@ -247,6 +248,7 @@ $app_r->get('/project')->name('project')->to('project#index'); $app_r->get('/project/#id')->name('project_show')->to('project#show'); $app_r->get('/rollout_server/:version')->to('rollout_server#index'); + $app_r->get('/efficiency')->to('efficiency#index'); my $admin = $r->any('/admin'); my $admin_auth = $admin->under('/')->to('session#ensure_admin')->name('ensure_admin'); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/resources/migrations/Pg.sql new/MirrorCache-1.089/lib/MirrorCache/resources/migrations/Pg.sql --- old/MirrorCache-1.088/lib/MirrorCache/resources/migrations/Pg.sql 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/resources/migrations/Pg.sql 2024-12-13 12:17:01.000000000 +0100 @@ -444,4 +444,5 @@ -- 40 up update popular_os set mask = 
'.*[lL]eap(/|_)(([1-9][0-9])(.|_)([0-9])?(-test|-Current)?)/.*|(.*/(16|15|12|43|42).(0|1|2|3|4|5|6)/.*)' where id = 4; insert into popular_os(id,name,mask) select 10, 'slowroll', '.*/[Ss]lowroll/.*' on conflict do nothing; - +-- 41 up +alter table stat_agg add primary key (period, dt, mirror_id); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/lib/MirrorCache/resources/migrations/mysql.sql new/MirrorCache-1.089/lib/MirrorCache/resources/migrations/mysql.sql --- old/MirrorCache-1.088/lib/MirrorCache/resources/migrations/mysql.sql 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/lib/MirrorCache/resources/migrations/mysql.sql 2024-12-13 12:17:01.000000000 +0100 @@ -457,3 +457,5 @@ -- 40 up update popular_os set mask = '.*[lL]eap(/|_)(([1-9][0-9])(.|_)([0-9])?(-test|-Current)?)/.*|(.*/(16|15|12|43|42).(0|1|2|3|4|5|6)/.*)' where id = 4; insert into popular_os(id,name,mask) select 10, 'slowroll', '.*/[Ss]lowroll/.*' on duplicate key update id=id; +-- 41 up +alter table stat_agg add primary key if not exists (period, dt, mirror_id); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/01-smoke-mirror-hasall-remote.sh new/MirrorCache-1.089/t/environ/01-smoke-mirror-hasall-remote.sh --- old/MirrorCache-1.088/t/environ/01-smoke-mirror-hasall-remote.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/01-smoke-mirror-hasall-remote.sh 2024-12-13 12:17:01.000000000 +0100 @@ -56,7 +56,7 @@ $mc/curl /download/folder1/file1.1.dat.metalink | grep "${FAKEURL2}"/folder1/file1.1.dat # with pedantic we ignore it though rc=0 -$mc/curl -I /download/folder1/file1.1.dat?"COUNTRY=it&PEDANTIC=1" | grep "${FAKEURL}" || rc=$? +$mc/curl -I /download/folder1/file1.1.dat?"COUNTRY=it&PEDANTIC=2" | grep "${FAKEURL}" || rc=$? test $rc -gt 0 echo When METALINK_GREEDY is set, REDIRECT url will appear only as comment if mirror count exceeds value of METALINK_GREEDY diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/01-smoke-mirror-hasall.sh new/MirrorCache-1.089/t/environ/01-smoke-mirror-hasall.sh --- old/MirrorCache-1.088/t/environ/01-smoke-mirror-hasall.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/01-smoke-mirror-hasall.sh 2024-12-13 12:17:01.000000000 +0100 @@ -42,7 +42,7 @@ $mc/curl /download/folder1/file1.1.dat.metalink | grep "${FAKEURL}"/folder1/file1.1.dat # with pedantic we ignore it though rc=0 -$mc/curl -I /download/folder1/file1.1.dat?"COUNTRY=it&PEDANTIC=1" | grep "${FAKEURL}" || rc=$? +$mc/curl -I /download/folder1/file1.1.dat?"COUNTRY=it&PEDANTIC=2" | grep "${FAKEURL}" || rc=$? 
test $rc -gt 0 echo test LIMIT with meta4 diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/02-files.sh new/MirrorCache-1.089/t/environ/02-files.sh --- old/MirrorCache-1.088/t/environ/02-files.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/02-files.sh 2024-12-13 12:17:01.000000000 +0100 @@ -38,11 +38,11 @@ $ap7/start $ap8/start -$mc/curl -I -H "Accept: */*, application/metalink+xml" /download/Folder1/repodata/repomd.xml | grep '200 OK' -$mc/curl -I -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: $(date -u --rfc-3339=seconds --date='1 second ago')" /download/Folder1/repodata/repomd.xml | grep '304 Not Modified' -$mc/curl -I -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: $(date -u --rfc-3339=seconds --date='1 hour ago')" /download/Folder1/repodata/repomd.xml | grep '200 OK' -$mc/curl -I -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: Sun, 06 Nov 1994 08:49:37 GMT" /download/Folder1/repodata/repomd.xml | grep '200 OK' -$mc/curl -I -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: Smoe 10 Garbage 10:53:46 UTC 2024x" /download/Folder1/repodata/repomd.xml | grep '200 OK' +$mc/curl -i -H "Accept: */*, application/metalink+xml" /download/Folder1/repodata/repomd.xml | grep '200 OK' +$mc/curl -i -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: $(date -u --rfc-3339=seconds --date='1 second ago')" /download/Folder1/repodata/repomd.xml | grep '304 Not Modified' +$mc/curl -i -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: $(date -u --rfc-3339=seconds --date='1 hour ago')" /download/Folder1/repodata/repomd.xml | grep '200 OK' +$mc/curl -i -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: Sun, 06 Nov 1994 08:49:37 GMT" /download/Folder1/repodata/repomd.xml | grep '200 OK' +$mc/curl -i -H "Accept: */*, application/metalink+xml" -H "If-Modified-Since: Smoe 10 Garbage 10:53:46 UTC 2024x" /download/Folder1/repodata/repomd.xml | grep '200 OK' $mc/sql "insert into server(hostname,urldir,enabled,country,region) select '$($ap7/print_address)','','t','us','na'" $mc/sql "insert into server(hostname,urldir,enabled,country,region) select '$($ap8/print_address)','','t','ca','na'" @@ -53,8 +53,8 @@ echo 1 > $ap8/dt/folder1/file1.dat # force scan -$mc/curl -I /download/folder1/file2.1.dat -$mc/curl -I /download/folder1/file2.1.dat?COUNTRY=ca +$mc/curl -i /download/folder1/file2.1.dat +$mc/curl -i /download/folder1/file2.1.dat?COUNTRY=ca $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule $mc/backstage/shoot @@ -65,16 +65,18 @@ test 2 == $($mc/db/sql "select count(*) from folder_diff") test 1 == $($mc/db/sql "select count(*) from folder_diff_file") -$mc/curl -I /download/folder1/file2.1.dat | grep 302 -$mc/curl -I /download/folder1/file1.dat | grep 302 +$mc/curl -i /download/folder1/file2.1.dat | grep 302 +$mc/curl -i /download/folder1/file1.dat | grep 302 mv $ap7/dt/folder1/file2.1.dat $ap8/dt/folder1/ mv $ap8/dt/folder1/file1.dat $ap7/dt/folder1/ -$mc/curl -I /download/folder1/file2.1.dat?PEDANTIC=0 | grep 302 -$mc/curl -I /download/folder1/file2.1.dat?PEDANTIC=1 | grep 200 +$mc/curl -i /download/folder1/file2.1.dat?PEDANTIC=0 | grep 302 +$mc/curl -i /download/folder1/file2.1.dat?PEDANTIC=1 | grep 200 # file1 isn't considered versioned, so pedantic mode is automatic -$mc/curl -I /download/folder1/file1.dat | grep 200 +$mc/curl -i /download/folder1/file1.dat | grep 
200 +$mc/curl -I /download/folder1/file1.dat | grep 302 +$mc/curl -I /download/folder1/file1.dat?PEDANTIC=2 | grep 200 # make root the same size of folder1/file1.dat cp $ap7/dt/folder1/file1.dat $mc/dt/folder1/file1.dat @@ -85,9 +87,9 @@ $mc/backstage/job mirror_scan_schedule $mc/backstage/shoot -$mc/curl -I /download/folder1/file2.1.dat | grep 302 -$mc/curl -I /download/folder1/file1.1.dat | grep 302 -$mc/curl -I /download/folder1/file1.dat | grep 302 +$mc/curl -i /download/folder1/file2.1.dat | grep 302 +$mc/curl -i /download/folder1/file1.1.dat | grep 302 +$mc/curl -i /download/folder1/file1.dat | grep 302 # now add new file everywhere for x in $mc $ap7 $ap8; do @@ -95,7 +97,7 @@ done # first request will miss -$mc/curl -I /download/folder1/file3.1.dat | grep 200 +$mc/curl -i /download/folder1/file3.1.dat | grep 200 # pass too big value for prev_stat_id and make sure it is automatically adjusted $mc/backstage/job -e folder_sync_schedule_from_misses -a '["1000000"]' @@ -105,13 +107,13 @@ $mc/backstage/shoot # now expect to hit -$mc/curl -I /download/folder1/file3.1.dat | grep 302 +$mc/curl -i /download/folder1/file3.1.dat | grep 302 # now add new file only on main server and make sure it doesn't try to redirect touch $mc/dt/folder2/file4.dat -$mc/curl -I /download/folder2/file4.dat | grep 200 -$mc/curl -I /download/folder2/file4.dat?COUNTRY=ca | grep 200 +$mc/curl -i /download/folder2/file4.dat | grep 200 +$mc/curl -i /download/folder2/file4.dat?COUNTRY=ca | grep 200 $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule @@ -123,7 +125,7 @@ cnt="$($mc/db/sql "select max(id) from stat")" -$mc/curl -I /download/folder1/file2.1.dat | grep 302 +$mc/curl -i /download/folder1/file2.1.dat | grep 302 $mc/sql_test 0 == "select count(*) from stat where mirror_id = -1 and file_id is not NULL and id > $cnt" @@ -132,7 +134,7 @@ $mc/sql "update folder set scan_last = now() - interval '5 hour' where path = '/folder2'" $mc/sql "update folder set scan_scheduled = scan_last - interval '1 second' where path = '/folder2'" $mc/sql "update folder set scan_requested = scan_last - interval '2 second' where path = '/folder2'" -$mc/curl -I /download/folder2/file4.dat | grep 200 +$mc/curl -i /download/folder2/file4.dat | grep 200 # now an error must be logged $mc/sql_test 1 == "select count(*) from folder where path = '/folder2' and scan_requested > scan_scheduled" @@ -141,13 +143,13 @@ # let's test path distortions # remember number of folders in DB cnt=$($mc/db/sql "select count(*) from folder") -$mc/curl -I /download//folder1//file1.1.dat +$mc/curl -i /download//folder1//file1.1.dat $mc/sql_test $cnt == "select count(*) from folder" -$mc/curl -I /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 
################################## # now add media.1/media @@ -157,7 +159,7 @@ echo CONTENT2 > $x/dt/folder1/media.1/media done -$mc/curl -I /download/folder1/media.1/file1.1.dat +$mc/curl -i /download/folder1/media.1/file1.1.dat sleep $MIRRORCACHE_SCHEDULE_RETRY_INTERVAL $mc/backstage/shoot @@ -176,9 +178,9 @@ $mc/backstage/shoot for f in $unversionedfiles; do - $mc/curl -I /download/folder1.11test/$f | grep 200 + $mc/curl -i /download/folder1.11test/$f | grep 200 # sha256 must be served from root - [[ $f =~ sha256 ]] || $mc/curl -I /download/folder1.11test/$f?PEDANTIC=0 | grep 302 + [[ $f =~ sha256 ]] || $mc/curl -i /download/folder1.11test/$f?PEDANTIC=0 | grep 302 cp $ap7/dt/folder1.11test/$f $mc/dt/folder1.11test/ done @@ -191,19 +193,19 @@ # now unversioned files are served from mirror because they are the same as on root for f in $unversionedfiles; do # sha256 must be served from root - [[ $f =~ sha256 ]] || $mc/curl -I /download/folder1.11test/$f | grep 302 + [[ $f =~ sha256 ]] || $mc/curl -i /download/folder1.11test/$f | grep 302 done # test case insensitive: -$mc/curl -I /download/folder1/file1.1.dat | grep '302 Found' -$mc/curl -I /download/folder1/file1.1.dat | grep -i Etag -$mc/curl -I /download/Folder1/file1.1.DAT | grep '200 OK' +$mc/curl -i /download/folder1/file1.1.dat | grep '302 Found' +$mc/curl -i /download/folder1/file1.1.dat | grep -i Etag +$mc/curl -i /download/Folder1/file1.1.DAT | grep '200 OK' echo check cache control -$mc/curl -I -H "Accept: */*, application/metalink+xml" /download/Folder1/repodata/repomd.xml | grep Cache-Control -$mc/curl -I /download/folder1/file1.1.dat | grep Cache-Control -$mc/curl -I /download/Folder1/file1.1.DAT | grep Cache-Control +$mc/curl -i -H "Accept: */*, application/metalink+xml" /download/Folder1/repodata/repomd.xml | grep Cache-Control +$mc/curl -i /download/folder1/file1.1.dat | grep Cache-Control +$mc/curl -i /download/Folder1/file1.1.DAT | grep Cache-Control diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/03-geo-locations.sh new/MirrorCache-1.089/t/environ/03-geo-locations.sh --- old/MirrorCache-1.088/t/environ/03-geo-locations.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/03-geo-locations.sh 2024-12-13 12:17:01.000000000 +0100 @@ -4,7 +4,7 @@ mc=$(environ mc $(pwd)) MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL ap9=$(environ ap9) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote-current.sh new/MirrorCache-1.089/t/environ/04-remote-current.sh --- old/MirrorCache-1.088/t/environ/04-remote-current.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-current.sh 2024-12-13 12:17:01.000000000 +0100 @@ -5,7 +5,7 @@ ap9=$(environ ap9) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 \ MIRRORCACHE_ROOT=http://$($ap9/print_address) \ MIRRORCACHE_ROOT_NFS=$ap9/dt diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote-down.sh new/MirrorCache-1.089/t/environ/04-remote-down.sh --- old/MirrorCache-1.088/t/environ/04-remote-down.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-down.sh 2024-12-13 12:17:01.000000000 +0100 
@@ -5,7 +5,7 @@ ap9=$(environ ap9) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ap9/print_address) ap8=$(environ ap8) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote-link.sh new/MirrorCache-1.089/t/environ/04-remote-link.sh --- old/MirrorCache-1.088/t/environ/04-remote-link.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-link.sh 2024-12-13 12:17:01.000000000 +0100 @@ -5,7 +5,7 @@ ap9=$(environ ap9) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 \ MIRRORCACHE_ROOT=http://$($ap9/print_address) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote-mc.sh new/MirrorCache-1.089/t/environ/04-remote-mc.sh --- old/MirrorCache-1.088/t/environ/04-remote-mc.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-mc.sh 2024-12-13 12:17:01.000000000 +0100 @@ -8,7 +8,7 @@ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ng9/print_address)/download \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL @@ -32,7 +32,7 @@ rm $ng8/dt/folder1/file2.1.dat # first request redirected to root -$mc/curl -I /download/folder1/file2.1.dat | grep $($ng9/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ng9/print_address) $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule @@ -47,13 +47,13 @@ test 2 == $($mc/db/sql "select count(*) from folder_diff") test 1 == $($mc/db/sql "select count(*) from folder_diff_file") -$mc/curl -I /download/folder1/file2.1.dat | grep $($ng7/print_address) -$mc/curl -I -H "If-Modified-Since: $(date -u --rfc-3339=seconds)" /download/folder1/file2.1.dat | grep '304 Not Modified' +$mc/curl -i /download/folder1/file2.1.dat | grep $($ng7/print_address) +$mc/curl -i -H "If-Modified-Since: $(date -u --rfc-3339=seconds)" /download/folder1/file2.1.dat | grep '304 Not Modified' mv $ng7/dt/folder1/file2.1.dat $ng8/dt/folder1/ # gets redirected to root again -$mc/curl -I /download/folder1/file2.1.dat | grep $($ng9/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ng9/print_address) $mc/backstage/job mirror_scan_schedule_from_path_errors $mc/backstage/job mirror_scan_schedule @@ -62,7 +62,7 @@ $mc/curl -H "Accept: */*, application/metalink+xml" /download/folder1/file2.1.dat | grep $($ng9/print_address) # now redirects to ng8 -$mc/curl -I /download/folder1/file2.1.dat | grep $($ng8/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ng8/print_address) # now add new file everywhere for x in $ng9 $ng7 $ng8; do @@ -73,12 +73,12 @@ $mc/backstage/job mirror_scan_schedule $mc/backstage/shoot # now expect to hit -$mc/curl -I /download/folder1/file3.1.dat | grep -E "$($ng8/print_address)|$($ng7/print_address)" +$mc/curl -i /download/folder1/file3.1.dat | grep -E "$($ng8/print_address)|$($ng7/print_address)" # now add new file only on main server and make sure it doesn't try to redirect touch $ng9/dt/folder1/file4.dat -$mc/curl -I /download/folder1/file4.dat | grep -E "$($ng9/print_address)" +$mc/curl -i /download/folder1/file4.dat | grep -E "$($ng9/print_address)" $mc/backstage/job folder_sync_schedule $mc/backstage/shoot $mc/backstage/job mirror_scan_schedule @@ -88,7 
+88,7 @@ cnt="$($mc/db/sql "select count(*) from audit_event")" -$mc/curl -I /download/folder1/file4.dat +$mc/curl -i /download/folder1/file4.dat # it shouldn't try to probe yet, because scanner didn't find files on the mirrors test 0 == $($mc/db/sql "select count(*) from audit_event where name = 'mirror_probe' and id > $cnt") @@ -99,14 +99,14 @@ done # this is needed for schedule jobs to retry on next shoot -$mc/curl -I /download/folder1/folder11/file1.1.dat +$mc/curl -i /download/folder1/folder11/file1.1.dat $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule $mc/backstage/shoot $mc/backstage/job mirror_scan_schedule $mc/backstage/shoot -$mc/curl -I /download/folder1/folder11/file1.1.dat | grep -E "$($ng7/print_address)|$($ng8/print_address)" +$mc/curl -i /download/folder1/folder11/file1.1.dat | grep -E "$($ng7/print_address)|$($ng8/print_address)" $mc/curl /download/folder1/folder11/ | grep file1.1.dat @@ -119,7 +119,7 @@ # let's test path distortions # remember number of folders in DB cnt=$($mc/db/sql "select count(*) from folder") -$mc/curl -I /download//folder1//file1.1.dat +$mc/curl -i /download//folder1//file1.1.dat $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule $mc/backstage/shoot @@ -127,10 +127,10 @@ $mc/backstage/shoot test $cnt == $($mc/db/sql "select count(*) from folder") -$mc/curl -I /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 ################################## @@ -141,7 +141,7 @@ done # first request will miss -$mc/curl -I /download/folder1/file:4.dat | grep -E "$($ng9/print_address)" +$mc/curl -i /download/folder1/file:4.dat | grep -E "$($ng9/print_address)" $mc/db/sql "select s.id, s.hostname, fd.id, fd.hash, fl.name, fd.dt, fl.dt from @@ -174,7 +174,7 @@ # now expect to hit $mc/curl /download/folder1/ | grep file1.1.dat $mc/curl /download/folder1/ | grep file:4.dat -$mc/curl -I /download/folder1/file:4.dat | grep -E "$($ng8/print_address)|$(ng7*/print_address)" +$mc/curl -i /download/folder1/file:4.dat | grep -E "$($ng8/print_address)|$(ng7*/print_address)" ################################## f=0123456789012345678901234567890123456789.\(\#@~\)abcdefghijklmnoprst.dat @@ -200,8 +200,8 @@ $mc/curl /download/folder1/ | grep -B2 ln-Media.iso | grep '10 Byte' -$mc/curl -IL /download/folder1/$e | grep '200 OK' -$mc/curl -I /download/folder1/$e | grep -C20 '302 Found' | grep -E "$($ng7/print_address)|$($ng8/print_address)" | grep "/folder1/$e" +$mc/curl -iL /download/folder1/$e | grep '200 OK' +$mc/curl -i /download/folder1/$e | grep -C20 '302 Found' | grep -E "$($ng7/print_address)|$($ng8/print_address)" | grep "/folder1/$e" for x in $mc $ng9; do diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' 
old/MirrorCache-1.088/t/environ/04-remote-nginx-redirect.sh new/MirrorCache-1.089/t/environ/04-remote-nginx-redirect.sh --- old/MirrorCache-1.088/t/environ/04-remote-nginx-redirect.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-nginx-redirect.sh 2024-12-13 12:17:01.000000000 +0100 @@ -6,7 +6,7 @@ ng9=$(environ ng9) ap9=$(environ ap9) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ng9/print_address) \ MIRRORCACHE_REDIRECT=http://$($ap9/print_address) \ MIRRORCACHE_REDIRECT_VPN=root.vpn.us \ @@ -49,7 +49,7 @@ mv $ng7/dt/folder1/file2.1.dat $ng8/dt/folder1/ -# gets redirected to MIRRORCACHE_REDIRECT again, because MIRRORCACHE_PEDANTIC is set to 1 +# gets redirected to MIRRORCACHE_REDIRECT again, because MIRRORCACHE_PEDANTIC is not 0 $mc/curl -I /download/folder1/file2.1.dat | grep $($ap9/print_address) $mc/backstage/job mirror_scan_schedule_from_path_errors diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote-nginx.sh new/MirrorCache-1.089/t/environ/04-remote-nginx.sh --- old/MirrorCache-1.088/t/environ/04-remote-nginx.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote-nginx.sh 2024-12-13 12:17:01.000000000 +0100 @@ -7,7 +7,7 @@ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ng9/print_address) \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/04-remote.sh new/MirrorCache-1.089/t/environ/04-remote.sh --- old/MirrorCache-1.088/t/environ/04-remote.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/04-remote.sh 2024-12-13 12:17:01.000000000 +0100 @@ -5,7 +5,7 @@ ap9=$(environ ap9) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ap9/print_address) \ MIRRORCACHE_TOP_FOLDERS="'folder1 folder2 folder3'" \ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 @@ -29,7 +29,7 @@ rm $ap8/dt/folder1/file2.1.dat # first request redirected to root -$mc/curl -I /download/folder1/file2.1.dat | grep $($ap9/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ap9/print_address) $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule @@ -44,12 +44,12 @@ $mc/sql 'insert into hash(file_id, size, mtime, dt) select 1, 5, extract(epoch from now()), now()' $mc/curl /download/folder1/ | grep -A2 file1.1.dat | grep '5 Byte' -$mc/curl -I /download/folder1/file2.1.dat | grep $($ap7/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ap7/print_address) mv $ap7/dt/folder1/file2.1.dat $ap8/dt/folder1/ # gets redirected to root again -$mc/curl -I /download/folder1/file2.1.dat | grep $($ap9/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ap9/print_address) $mc/backstage/job mirror_scan_schedule_from_path_errors $mc/backstage/shoot @@ -59,7 +59,7 @@ $mc/curl -H "Accept: */*, application/metalink+xml" /download/folder1/file2.1.dat | grep $($ap9/print_address) # now redirects to ap8 -$mc/curl -I /download/folder1/file2.1.dat | grep $($ap8/print_address) +$mc/curl -i /download/folder1/file2.1.dat | grep $($ap8/print_address) # now add new file everywhere for x in $ap9 $ap7 $ap8; do @@ -70,12 +70,12 @@ $mc/backstage/job mirror_scan_schedule 
$mc/backstage/shoot # now expect to hit -$mc/curl -I /download/folder1/file3.1.dat | grep -E "$($ap8/print_address)|$($ap7/print_address)" +$mc/curl -i /download/folder1/file3.1.dat | grep -E "$($ap8/print_address)|$($ap7/print_address)" # now add new file only on main server and make sure it doesn't try to redirect touch $ap9/dt/folder1/file4.dat -$mc/curl -I /download/folder1/file4.dat | grep -E "$($ap9/print_address)" +$mc/curl -i /download/folder1/file4.dat | grep -E "$($ap9/print_address)" $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule @@ -87,7 +87,7 @@ cnt="$($mc/sql 'select count(*) from audit_event')" -$mc/curl -I /download/folder1/file4.dat +$mc/curl -i /download/folder1/file4.dat # it shouldn't try to probe yet, because scanner didn't find files on the mirrors test 0 == $($mc/sql "select count(*) from audit_event where name = 'mirror_probe' and id > $cnt") @@ -98,14 +98,14 @@ done # this is needed for schedule jobs to retry on next shoot -$mc/curl -I /download/folder1/folder11/file1.1.dat +$mc/curl -i /download/folder1/folder11/file1.1.dat $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule $mc/backstage/shoot $mc/backstage/job mirror_scan_schedule $mc/backstage/shoot -$mc/curl -I /download/folder1/folder11/file1.1.dat | grep -E "$($ap7/print_address)|$($ap8/print_address)" +$mc/curl -i /download/folder1/folder11/file1.1.dat | grep -E "$($ap7/print_address)|$($ap8/print_address)" $mc/curl /download/folder1/folder11/ | grep file1.1.dat @@ -118,7 +118,7 @@ # let's test path distortions # remember number of folders in DB cnt=$($mc/sql "select count(*) from folder") -$mc/curl -I /download//folder1//file1.1.dat +$mc/curl -i /download//folder1//file1.1.dat $mc/backstage/job folder_sync_schedule_from_misses $mc/backstage/job folder_sync_schedule $mc/backstage/shoot @@ -126,10 +126,10 @@ $mc/backstage/shoot test $cnt == $($mc/sql "select count(*) from folder") -$mc/curl -I /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 -$mc/curl -I /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1//file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download//folder1///file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/././file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 +$mc/curl -i /download/./folder1/../folder1/./file1.1.dat | grep -C 10 -P '[^/]/folder1/file1.1.dat' | grep 302 ################################## @@ -140,7 +140,7 @@ done # first request will miss -$mc/curl -I /download/folder1/file:4.dat | grep -E "$($ap9/print_address)" +$mc/curl -i /download/folder1/file:4.dat | grep -E "$($ap9/print_address)" $mc/db/sql "select s.id, s.hostname, fd.id, fd.hash, fl.name, fd.dt, fl.dt from @@ -173,7 +173,7 @@ # now expect to hit $mc/curl /download/folder1/ | grep file1.1.dat $mc/curl /download/folder1/ | grep file:4.dat -$mc/curl -I /download/folder1/file:4.dat | grep -E "$($ap8/print_address)|$(ap7*/print_address)" +$mc/curl -i /download/folder1/file:4.dat | grep -E "$($ap8/print_address)|$(ap7*/print_address)" ################################## grep MirrorCache/mirror_scan $ap7/dt/access_log diff -urN 
'--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/10-remote-redirect.sh new/MirrorCache-1.089/t/environ/10-remote-redirect.sh --- old/MirrorCache-1.088/t/environ/10-remote-redirect.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/10-remote-redirect.sh 2024-12-13 12:17:01.000000000 +0100 @@ -20,7 +20,7 @@ # this mirror will have disabled both http and https ap6=$(environ ap6) -$mc/gen_env MIRRORCACHE_PEDANTIC=1 \ +$mc/gen_env MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_ROOT=http://$($ap5/print_address) \ MIRRORCACHE_REDIRECT=$($ap4/print_address) \ MOJO_CA_FILE=$(pwd)/ca/ca.pem \ @@ -145,7 +145,7 @@ # shutdown ap7, then https must redirect to ap4 $ap7/stop -$ap9/curl_https -I /folder1/file1.1.dat?PEDANTIC=1 | grep https:// | grep $($ap4/print_address) +$ap9/curl_https -I /folder1/file1.1.dat?PEDANTIC=2 | grep https:// | grep $($ap4/print_address) $ap9/curl_https /folder1/file1.1.dat.metalink | grep 'origin="https://metalink_publisher.net/folder1/file1.1.dat.metalink"' diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/16-rescan-forget-unused.sh new/MirrorCache-1.089/t/environ/16-rescan-forget-unused.sh --- old/MirrorCache-1.088/t/environ/16-rescan-forget-unused.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/16-rescan-forget-unused.sh 2024-12-13 12:17:01.000000000 +0100 @@ -5,7 +5,7 @@ MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0 $mc/gen_env MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL \ - MIRRORCACHE_PEDANTIC=1 \ + MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_RECKLESS=1 $mc/start diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/16-rescan-from-errors.sh new/MirrorCache-1.089/t/environ/16-rescan-from-errors.sh --- old/MirrorCache-1.088/t/environ/16-rescan-from-errors.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/16-rescan-from-errors.sh 2024-12-13 12:17:01.000000000 +0100 @@ -7,7 +7,7 @@ $mc/gen_env MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL \ MIRRORCACHE_RESCAN_INTERVAL=$MIRRORCACHE_RESCAN_INTERVAL \ - MIRRORCACHE_PEDANTIC=1 \ + MIRRORCACHE_PEDANTIC=2 \ MIRRORCACHE_RECKLESS=0 $mc/start @@ -70,8 +70,8 @@ $mc/sql "update folder set sync_requested = sync_last - interval '2 second'" for x in {1,2,3} ; do - $mc/curl -I /download/folder$x/file1.1.dat?PEDANTIC=0 | grep '302 Found' - $mc/curl -I /download/folder$x/file1.1.dat | grep '200 OK' + $mc/curl -i /download/folder$x/file1.1.dat?PEDANTIC=0 | grep '302 Found' + $mc/curl -i /download/folder$x/file1.1.dat | grep '200 OK' done $mc/backstage/job mirror_scan_schedule_from_path_errors @@ -87,7 +87,7 @@ sleep $S for x in {1,2,3} ; do - $mc/curl -I /download/folder$x/file1.1.dat | grep '200 OK' + $mc/curl -i /download/folder$x/file1.1.dat | grep '200 OK' done $mc/backstage/job mirror_scan_schedule_from_path_errors @@ -100,3 +100,5 @@ $mc/sql_test 6 == "select count(*) from minion_jobs where task='folder_sync'" # only folder3 must cause mirror scan from path_errors, but folder1 will be rescanned from Stat.pm $mc/sql_test 5 == "select count(*) from minion_jobs where task='mirror_scan'" + +echo success diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/t/environ/20-report-download.sh new/MirrorCache-1.089/t/environ/20-report-download.sh --- 
old/MirrorCache-1.088/t/environ/20-report-download.sh 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/t/environ/20-report-download.sh 2024-12-13 12:17:01.000000000 +0100 @@ -100,14 +100,28 @@ $mc/curl -Is /download/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/arm64/libethercat_1.5.2-33_arm64.deb | grep 'X-MEDIA-VERSION: 1.5.2' - $mc/curl -Is /download/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/arm64/libethercat_1.5.2-33_arm64.deb | grep 'X-MEDIA-VERSION: 1.5.2' $mc/curl -Is '/download/distribution/leap/15.3/repo/oss/noarch/?REGEX=.*\.noarch\..?rpm' | grep 'X-MEDIA-VERSION: 17.5.0,7.3.6.2' +rc=0 +$mc/curl -Is /download/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/ | grep -i X-MEDIA-VERSION || rc=$? +test $rc -gt 0 + $mc/curl '/rest/repdownload?group=country&os=ubuntu' $mc/curl '/rest/repdownload?group=country,mirror&type=rpm' $mc/curl "/rest/repdownload?group=project&mirror=$(ap7/print_address)" $mc/curl '/rest/repdownload?group=project,mirror&country=de' + +$mc/backstage/job stat_agg_schedule +$mc/backstage/shoot + +$mc/sql "insert into stat_agg select dt - interval '1 day', period, mirror_id, hit_count from stat_agg where period = 'day'" +$mc/sql "insert into stat_agg select dt - interval '1 hour', period, mirror_id, hit_count from stat_agg where period = 'hour'" + +$mc/curl /rest/efficiency +$mc/curl /rest/efficiency?period=day + + echo success diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/templates/app/efficiency/index.html.ep new/MirrorCache-1.089/templates/app/efficiency/index.html.ep --- old/MirrorCache-1.088/templates/app/efficiency/index.html.ep 1970-01-01 01:00:00.000000000 +0100 +++ new/MirrorCache-1.089/templates/app/efficiency/index.html.ep 2024-12-13 12:17:01.000000000 +0100 @@ -0,0 +1,94 @@ +% layout 'bootstrap'; +% title 'Efficiency'; + +% content_for 'head' => begin + <script> + +function updateHistoryChart(period) { + var period1 = period + 'ly'; + if (period === 'day') { + period1 = 'daily'; + } + $.ajax({ + url: '/rest/efficiency', + method: 'GET', + data: { + period: period, + }, + success: function(response) { + layout = { + title: {text: 'Cache Efficiency ' + period1}, + yaxis: { + title: { + text: "Count" + } + }, + yaxis2: { + title: { + text: "Hit Rate", + font: {color: "#0000FF"} + }, + overlaying: 'y', + range: [0, 100], + fixedrange: true, + side: 'right' + }, + }; + + + data = [ + {name: 'hits', type: 'scatter', x: [], y: [], line: { color: "#00FF00", dash: 'solid', width: 2 }}, // hit + {name: 'misses', type: 'scatter', x: [], y: [], line: { color: "#FF0000", dash: 'solid', width: 2 }}, // miss + {name: 'passes', type: 'scatter', x: [], y: [], line: { color: "#FFFF00", dash: 'solid', width: 1 }}, // pass + {name: 'bot', type: 'scatter', x: [], y: [], line: { color: "#A52A2A", dash: 'dashdot', width: 1 }}, // bot + {name: 'geo', type: 'scatter', x: [], y: [], line: { color: "#220031", dash: 'dot', width: 1 }}, // geo + {name: 'efficiency', type: 'scatter', x: [], y: [], line: { color: "#0000FF", dash: 'solid', width: 3 }, yaxis: 'y2'}, // hitrate + ]; + + response.forEach((element, index, array) => { + var d = new Date(0); // The 0 there is the key, which sets the date to the epoch + d.setUTCSeconds(element.dt); + data.forEach((e, i, a) => { + data[i].x.push(d); + }); + data[0].y.push(element.hit); + data[1].y.push(element.miss); + data[2].y.push(element.pass); + data[3].y.push(element.bot); + 
data[4].y.push(element.geo); + if (element.hit + element.miss > 0) { + var rate = 100 * (element.hit / (eval(element.hit) + eval(element.miss))); + rate = Math.round(rate); + data[5].y.push(rate); + } + }); + + // Plotly.react('efficiency-chart', data, layout); + Plotly.newPlot('efficiency-chart', data, layout); + } + }); +} + + </script> +% end + + +% content_for 'ready_function' => begin + document.getElementsByClassName('tablinks')[1].click() + + // updateHistoryChart(); +% end + + +<div class="tab"> + <button class="tablinks" onclick="updateHistoryChart('hour')">Hourly</button> + <button class="tablinks" onclick="updateHistoryChart('day')">Daily</button> + <!-- button class="tablinks" onclick="updateHistoryChart('month')">Monthly</button --> +</div> + +<div class="row"> + <div class="col-md-12"> + <div id="efficiency-chart"></div> + </div> +</div> + diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/templates/branding/default/header.html.ep new/MirrorCache-1.089/templates/branding/default/header.html.ep --- old/MirrorCache-1.088/templates/branding/default/header.html.ep 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/templates/branding/default/header.html.ep 2024-12-13 12:17:01.000000000 +0100 @@ -22,6 +22,9 @@ <li class='nav-item' id="project"> %= link_to 'Projects' => url_for('/app/project') => class => 'nav-link' </li> + <li class='nav-item' id="efficiency"> + %= link_to 'Efficiency' => url_for('/app/efficiency') => class => 'nav-link' + </li> <li class='nav-item' id="stat"> %= link_to 'Statistics' => url_for('/rest/stat') => class => 'nav-link' </li> diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/MirrorCache-1.088/templates/branding/openSUSE/header.html.ep new/MirrorCache-1.089/templates/branding/openSUSE/header.html.ep --- old/MirrorCache-1.088/templates/branding/openSUSE/header.html.ep 2024-11-28 15:24:24.000000000 +0100 +++ new/MirrorCache-1.089/templates/branding/openSUSE/header.html.ep 2024-12-13 12:17:01.000000000 +0100 @@ -39,6 +39,7 @@ %= link_to 'Mirrors' => url_for('server') => class => 'dropdown-item' %= link_to 'Packages' => url_for('/app/package') => class => 'dropdown-item' %= link_to 'Projects' => url_for('project') => class => 'dropdown-item' + %= link_to 'Efficiency' => url_for('/app/efficiency') => class => 'dropdown-item' %= link_to 'Statistics' => url_for('/rest/stat') => class => 'dropdown-item' %= tag 'div' => class => 'dropdown-divider' %= tag 'h3' => class => 'dropdown-header' => 'User menu' ++++++ MirrorCache.obsinfo ++++++ --- /var/tmp/diff_new_pack.mBzrj3/_old 2024-12-20 15:29:08.680374575 +0100 +++ /var/tmp/diff_new_pack.mBzrj3/_new 2024-12-20 15:29:08.716376058 +0100 @@ -1,5 +1,5 @@ name: MirrorCache -version: 1.088 -mtime: 1732803864 -commit: df0f60d9831441fa74bb62147e230087c6458b2b +version: 1.089 +mtime: 1734088621 +commit: 1bb100fccac4537ab3f0950e158bfe3b911d0691 ++++++ cache.tar.xz ++++++
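
Editor's note on #547: the new /rest/efficiency route returns rows with dt, hit, miss, pass, geo and bot counters, and the /app/efficiency page plots a hit rate of 100 * hit / (hit + miss), rounded, on a secondary axis. As an illustration only (the host name and the use of jq are assumptions, not part of the package), the same rate can be computed from a shell:

  # sketch: print timestamp, counters and hit rate per row;
  # period=hour is the default, period=day selects the daily aggregation
  BASE=http://mirrorcache.example.com
  curl -s "$BASE/rest/efficiency?period=hour" | jq -r '
    .[] | (.hit|tonumber) as $h | (.miss|tonumber) as $m
        | [(.dt|tonumber|todate), $h, $m, .pass, .geo, .bot,
           (if $h + $m > 0 then (100 * $h / ($h + $m) | round) else "n/a" end)]
        | @tsv'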
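
Editor's note on #546: the change in RenderFileFromMirror.pm only affects HEAD requests; the redirect target of an unversioned file is no longer verified on HEAD unless PEDANTIC is at least 2, which is why the environ tests switched from PEDANTIC=1 to PEDANTIC=2. A hypothetical walk-through in the style of t/environ/02-files.sh (host and file are made up; the expected codes assume an unversioned file whose mirror copy differs from the copy on root, as in that test):

  BASE=http://mirrorcache.example.com
  curl -si "$BASE/download/folder1/file1.dat"             # GET is still verified: served from root, 200 OK
  curl -sI "$BASE/download/folder1/file1.dat"             # HEAD skips verification: plain redirect, 302 Found
  curl -sI "$BASE/download/folder1/file1.dat?PEDANTIC=2"  # HEAD with PEDANTIC=2 verifies again: 200 OK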
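
Editor's note on #548: Dir.pm now adds the X-MEDIA-VERSION header only when a media version was actually detected, instead of sending it with an empty value; the new check in t/environ/20-report-download.sh asserts that the header is absent for the Debian_Testing folder listing. A sketch against a hypothetical host, reusing the paths from that test:

  BASE=http://mirrorcache.example.com
  # versioned package: header is present
  curl -sI "$BASE/download/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/arm64/libethercat_1.5.2-33_arm64.deb" | grep -i x-media-version
  # folder with no derivable version: header is omitted since 1.089
  curl -sI "$BASE/download/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/" | grep -i x-media-version || echo "no X-MEDIA-VERSION header"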
