
Commit

Merge pull request #2 from swissbib/master
Update of the test branch
Basil Marti authored Sep 20, 2018
2 parents d586669 + 59abb0a commit 83878d1
Showing 7 changed files with 32 additions and 24 deletions.
1 change: 1 addition & 0 deletions bin/devel/initial-load-e_swissbib-db.pl
@@ -48,6 +48,7 @@
source: basel-bern-emedia.xml
EOD

+print "komplett neu aufbauen [j/N] ? ";
my $ans = <STDIN>;
exit unless $ans =~ /j/i;
13 changes: 7 additions & 6 deletions bin/ftp-download-data.pl
@@ -233,7 +233,8 @@ sub extract_mono_full {
unlink $MONO_RUN_SUMMARY;
unlink 'RunSummary.txt';

-my @lib = keys $downloads->{mono_full};

+my @lib = keys %{$downloads->{mono_full}};

foreach my $lib ( @lib ) {
my @zip = glob( $downloads->{mono_full}->{$lib} );
@@ -269,8 +270,8 @@ sub extract_ser_full {
unlink $SER_RUN_SUMMARY;
unlink 'RunSummary.txt';

-my @lib = keys $downloads->{ser_full};
+my @lib = keys %{$downloads->{ser_full}};

foreach my $lib ( @lib ) {
my @zip = glob( $downloads->{ser_full}->{$lib} );
my $zip = $zip[0];
@@ -306,8 +307,8 @@ sub extract_mono_delta {

say "ftp: extracting monographs delta\n";

-my @key = keys $downloads->{mono_delta};
+my @key = keys %{$downloads->{mono_delta}};

foreach my $key ( @key ) {
my $dir = $key .'_delta';
mkdir $dir
@@ -340,7 +341,7 @@ sub extract_ser_delta {

say "ftp: extracting serials delta\n";

-my @key = keys $downloads->{ser_delta};
+my @key = keys %{$downloads->{ser_delta}};

foreach my $key ( @key ) {
my $dir = $key .'_delta';
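
The recurring Perl change in this file (and in bin/merge-erm-ebook-marc.pl and bin/sync-deltas-with-local-db.pl below) swaps keys $hashref for keys %{$hashref}. Calling keys() directly on a hash reference was only ever an experimental feature and became a fatal error around Perl 5.24, which is presumably why every such call is dereferenced explicitly here. A minimal sketch with invented sample data, not taken from the repository:

    #!/usr/bin/perl
    # Sketch only: shows why the explicit dereference matters.
    # The %$downloads structure below is made-up sample data, not the real config.
    use strict;
    use warnings;
    use feature 'say';

    my $downloads = {
        mono_full => { dsv01 => 'dsv01_mono_*.zip', dsv05 => 'dsv05_mono_*.zip' },
    };

    # Old form (removed above): experimental "keys on a reference", fatal on recent Perls:
    # my @lib = keys $downloads->{mono_full};

    # New form: dereference the hash explicitly; works on every Perl version:
    my @lib = keys %{ $downloads->{mono_full} };

    say for sort @lib;    # prints dsv01 and dsv05
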
1 change: 0 additions & 1 deletion bin/idsbb_emedia.conf
@@ -2,4 +2,3 @@ DATADIR='/opt/data/e-books_test/data'
DOWNLOADDIR='/opt/data/e-books_test/download'
LOGDIR='/opt/data/e-books_test/log'
HIDDENCONF='/opt/scripts/e-books/bin/idsbb_emedia_hidden_test.conf'

2 changes: 1 addition & 1 deletion bin/idsbb_emedia_infomail.txt
@@ -4,7 +4,7 @@ Die E-Media-Metadaten aus Intota wurden soeben erfolgreich aufbereitet und nach
Die Daten werden in Swissbib in der naechsten Nacht verarbeitet und sind am Folgetag ab Mittag in
Swissbib und Swissbib Basel Bern mit "WaSeSS*" recherchierbar.

-Siehe auch http://www.ub.unibas.ch/babette/index.php/E-Book_Marc_Records_fuer_Swissbib
+Siehe auch http://www.ub.unibas.ch/babette/index.php/E-Ressourcen_Marc_Records_fuer_Swissbib

Herzlich

35 changes: 21 additions & 14 deletions bin/make-idsbb-emedia.sh
@@ -14,14 +14,15 @@ DO_MERGE=1
DO_SYNC=1
DO_DELTA=1
DO_UPLOAD=1
+DO_SAVE=1
DO_CLEANUP=1
DO_EMAIL=1

DATE=`date +%Y%m%d`
LINE='------------------------------------------------'

-#Das Logfile idsbb_emedia.conf enthält alle Variablen, die zwischen MASTER und TEST Branch abweichen. In diesem
-#Logfile findet sich auch der Pfad zur versteckten Logdatei ($HIDDENCONF), in dem E-Mail-Adressen und Zugangs-
+#Das Conffile idsbb_emedia.conf enthält alle Variablen, die zwischen MASTER und TEST Branch abweichen. In diesem
+#Confile findet sich auch der Pfad zur versteckten Confdatei ($HIDDENCONF), in dem E-Mail-Adressen und Zugangs-
#berechtigungen zu Proquest enthalten sind. Diese Datei wird nicht nach Github exportiert. Sie liegt für MASTER und
#TEST in zwei Versionen vor (idsbb_emedia_hidden.conf für MASTER und idsbb_emedia_hidden_test.conf für TEST.

@@ -48,7 +49,7 @@ cd $DATADIR

if [ "$DO_DOWNLOAD" == "1" ]; then
echo "* download and extract data" >> $LOG
-perl $BINDIR/ftp-download-data.pl
+perl $BINDIR/ftp-download-data.pl &>> $LOG
if [ "$?" != "0" ]; then
exit;
fi
@@ -59,7 +60,7 @@ if [ "$DO_MERGE" == "1" ]; then
echo "* [please be patient for about 30 minutes...]" >> $LOG
rm -f tmp.xml
rm -f basel-bern-emedia.xml
-perl $BINDIR/merge-erm-ebook-marc.pl
+perl $BINDIR/merge-erm-ebook-marc.pl &>> $LOG
if [ "$?" != "0" ]; then
exit;

@@ -73,7 +74,7 @@ fi

if [ "$DO_SYNC" == "1" ]; then
echo "* synchronizing MySQL database" >> $LOG
-perl $BINDIR/sync-deltas-with-local-db.pl
+perl $BINDIR/sync-deltas-with-local-db.pl &>> $LOG
if [ "$?" != "0" ]; then
exit;
fi
@@ -85,7 +86,7 @@ if [ "$DO_DELTA" == "1" ]; then
rm -f sersol-idsbb-emedia-updates.xml
rm -f sersol-idsbb-emedia-updates.xml.gz
rm -f sersol-idsbb-emedia-deletions.txt
-perl $BINDIR/create-delta-files.pl
+perl $BINDIR/create-delta-files.pl &>> $LOG
if [ "$?" != "0" ]; then
exit;
fi
@@ -95,30 +96,36 @@ if [ "$DO_DELTA" == "1" ]; then
exit;
fi
echo "* gzipping xml" >> $LOG
-gzip -f sersol-idsbb-emedia-updates-reformatted.xml
+gzip -f sersol-idsbb-emedia-updates-reformatted.xml &>> $LOG
echo "* writing stats" >> $LOG
-perl $BINDIR/e_swissbib_db_stats.pl
+perl $BINDIR/e_swissbib_db_stats.pl &>> $LOG
if [ "$?" != "0" ]; then
exit;
fi
fi

if [ "$DO_UPLOAD" == "1" ]; then
echo "* upload data" >> $LOG
-scp sersol-idsbb-emedia-updates-reformatted.xml.gz harvester@sb-coai1.swissbib.unibas.ch:/swissbib/harvesting/incomingSersol/./
-scp sersol-idsbb-emedia-deletions.txt harvester@sb-coai1.swissbib.unibas.ch:/swissbib/harvesting/oaiDeletes/./
+scp sersol-idsbb-emedia-updates-reformatted.xml.gz harvester@sb-ucoai1.swissbib.unibas.ch:/swissbib/harvesting/incomingSersol/./ &>> $LOG
+scp sersol-idsbb-emedia-deletions.txt harvester@sb-ucoai1.swissbib.unibas.ch:/swissbib/harvesting/oaiDeletes/./ &>> $LOG
fi

if [ "$DO_SAVE" == "1" ]; then
echo "* saving data" >> $LOG
cp sersol-idsbb-emedia-updates-reformatted.xml.gz backup/sersol-idsbb-emedia-updates-reformatted-$DATE.xml.gz &>> $LOG
cp sersol-idsbb-emedia-deletions.txt backup/sersol-idsbb-emedia-deletions-$DATE.txt &>> $LOG
fi

if [ "$DO_CLEANUP" == "1" ]; then
echo "* clean up temp files" >> $LOG
-rm -f *.mrc
-rm -f tmp.xml
+rm -f *.mrc &>> $LOG
+rm -f tmp.xml &>> $LOG
fi

printf 'END ' && date >> $LOG

-cp $STATS $STATS_ARCH
-cp $SHADOW_STATS $SHADOW_STATS_ARCH
+cp $STATS $STATS_ARCH &>> $LOG
+cp $SHADOW_STATS $SHADOW_STATS_ARCH &>> $LOG

if [ "$DO_EMAIL" == "1" ]; then
# Log-Datei an EDV nach jedem Lauf verschicken:
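
The shell-script changes above follow one pattern: each step appends both its stdout and stderr to the run log via the bash &>> redirection, the script aborts as soon as a step exits non-zero, and the new DO_SAVE block keeps dated copies of the generated files. A rough sketch of the same run-each-step, log-everything, stop-on-failure idea, written in Perl to match the other examples; the log path and step commands are assumptions for illustration, not taken from the repository:

    #!/usr/bin/perl
    # Hedged sketch of the pattern make-idsbb-emedia.sh implements with
    # `&>> $LOG` after each command and `if [ "$?" != "0" ]; then exit; fi`.
    use strict;
    use warnings;

    my $LOG = '/tmp/idsbb_emedia.log';    # assumed path, not the real $LOGDIR

    sub run_step {
        my ($label, $cmd) = @_;
        open my $fh, '>>', $LOG or die "cannot open $LOG: $!";
        print {$fh} "* $label\n";         # step marker, like the echo "* ..." >> $LOG lines
        close $fh;
        # append the step's stdout and stderr to the same log, like `&>> $LOG`
        my $status = system("$cmd >> $LOG 2>&1");
        exit 1 if $status != 0;           # stop the whole run on the first failure
    }

    # Illustrative step list; the real script resolves these via $BINDIR.
    run_step('download and extract data', 'perl ftp-download-data.pl');
    run_step('merging erm and marc data', 'perl merge-erm-ebook-marc.pl');
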
2 changes: 1 addition & 1 deletion bin/merge-erm-ebook-marc.pl
@@ -287,7 +287,7 @@ =head1 HISTORY

printf F ("Total records:%18.18s\n", pnum($stats->{total}));
delete $stats->{total};
-foreach my $key (sort keys $stats ) {
+foreach my $key (sort keys %$stats ) {
my $num = pnum($stats->{$key});
printf F ("- with holdings %-6.6s%10.10s\n", $key, $num);
}
2 changes: 1 addition & 1 deletion bin/sync-deltas-with-local-db.pl
@@ -136,7 +136,7 @@ =head1 HISTORY
new/changed/deleted 360MARC messages:
-----------------------------------------------------
EOD
-foreach my $key (sort keys $stats ) {
+foreach my $key (sort keys %$stats ) {
printf F ("%-8.8s | new:%8.8s | chg:%8.8s | del:%8.8s\n",
$key,
pnum($stats->{$key}->{new}),
