File: [LON-CAPA] / loncom / metadata_database / parse_activity_log.pl
Revision 1.26: Thu Nov 21 07:26:02 2024 UTC by raeburn
Branches: MAIN
CVS tags: version_2_12_X, HEAD
- Where session launch was via LTI-mediated deep-linking from another CMS
  and the launch payload included a URL and uniqueID for return of the score:
  (a) successful score transfer
      - logged in the course's activity log (action = EXPORT)
      - passback information stored in the student's $cdom_$cnum_lp_passback.db
  (b) unsuccessful score transfer
      - logged in the student's activity log and in lonnet.log
      - passback information stored in the course's linkprot_passback_pending.db
#!/usr/bin/perl
#
# The LearningOnline Network
#
# $Id: parse_activity_log.pl,v 1.26 2024/11/21 07:26:02 raeburn Exp $
#
# Copyright Michigan State University Board of Trustees
#
# This file is part of the LearningOnline Network with CAPA (LON-CAPA).
#
# LON-CAPA is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# LON-CAPA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LON-CAPA; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# /home/httpd/html/adm/gpl.txt
#
# http://www.lon-capa.org/
#
#--------------------------------------------------------------------
#
# Exit codes
# 0 Everything is okay
# 1 Another copy is running on this course
# 2 Activity log does not exist
# 3 Unable to connect to database
# 4 Unable to create database tables
# 5 Unable to open log file
# 6 Unable to get lock on activity log
#
#
# Notes:
#
# Logging is done via the $logthis variable, which may be the result of
# overcleverness.  Log via $logthis->('logtext');  Those are parentheses,
# not curly braces.  If the -log command line parameter is set, the $logthis
# routine is set to a routine which writes to a file.  If the command line
# parameter is not set, $logthis is set to \&nothing, which does what you
# would expect.
#
use strict;
use DBI;
use lib '/home/httpd/lib/perl/';
use LONCAPA::Configuration();
use Apache::lonmysql();
use Time::HiRes();
use Getopt::Long();
use IO::File;
use File::Copy;
use Fcntl qw(:flock);
use HTML::TokeParser;
#
# Determine parameters
my ($help,$course,$domain,$drop_when_done,$srcfile,$logfile,$time_run,$nocleanup,$log,$backup,$xmlfile);
&Getopt::Long::GetOptions( "course=s" => \$course,
"domain=s" => \$domain,
"backup" => \$backup,
"help" => \$help,
"logfile=s" => \$logfile,
"srcfile=s" => \$srcfile,
"justloadxml=s" => \$xmlfile,
"timerun" => \$time_run,
"nocleanup" => \$nocleanup,
"dropwhendone" => \$drop_when_done,
"log" => \$log);
if (! defined($course) || $help) {
print<<USAGE;
parse_activity_log.pl
Process a lon-capa activity log into a database.
Parameters:
course Required
domain optional
backup optional if present, backup the activity log file
before processing it
dropwhendone optional if present, drop all course
specific activity log tables after processing.
srcfile optional Specify the file to parse, including path
justloadxml optional load course tables from the specified XML backup, then exit
timerun optional if present, print out timing data
nocleanup optional if present, do not remove old files
log optional if present, prepare log file of activity
logfile optional specifies the logfile to use
Examples:
$0 -course=123456abcdef -domain=msu
$0 -course=123456abcdef -srcfile=activity.log
$0 -course=123456abcdef -log -logfile=/tmp/logfile -dropwhendone
USAGE
exit;
}
##
## Set up timing code
my $time_this = \&nothing;
if ($time_run) {
$time_this = \&time_action;
}
my $initial_time = Time::HiRes::time;
##
## Read in configuration parameters
##
my %perlvar = %{&LONCAPA::Configuration::read_conf('loncapa.conf')};
if (! defined($domain) || $domain eq '') {
$domain = $perlvar{'lonDefDomain'};
}
&update_process_name($course.'@'.$domain);
##
## Set up logging code
my $logthis = \&nothing;
if ($log) {
if (! $logfile) {
$logfile = $perlvar{'lonDaemons'}.'/tmp/parse_activity_log.log.'.time;
}
print STDERR "$0: logging to $logfile".$/;
if (! open(LOGFILE,">$logfile")) {
warn("Unable to open $logfile for writing. Run aborted.");
&clean_up_and_exit(5);
} else {
$logthis = \&log_to_file;
}
}
##
## Determine filenames
##
my $sourcefilename; # activity log data
my $newfilename; # $sourcefilename will be renamed to this
my $error_filename; # Errors in parsing the activity log will be written here
my $chunk_filename; # where we save data we are not going to write to db
if ($srcfile) {
$sourcefilename = $srcfile;
} else {
$sourcefilename = &get_filename($course,$domain);
}
my $sql_filename = $sourcefilename;
$sql_filename =~ s|[^/]*$|activity.log.sql|;
my $gz_sql_filename = $sql_filename.'.gz';
#
$chunk_filename = $sourcefilename.".unprocessed_chunks";
#
my $xml_filename = $sourcefilename;
my $gz_xml_filename = $xml_filename.'.gz';
if (defined($xmlfile)) {
$xml_filename = $xmlfile;
if ($xml_filename =~ /\.gz$/) {
$gz_xml_filename = $xml_filename;
} else {
$gz_xml_filename = $xml_filename.'.gz';
}
} else {
    $xml_filename = $sourcefilename;
$xml_filename =~ s|[^/]*$|activity.log.xml|;
$gz_xml_filename = $xml_filename.'.gz';
}
#
$error_filename = $sourcefilename;
$error_filename =~ s|[^/]*$|activity.log.errors|;
$logthis->('Beginning logging '.time);
#
# Wait for a lock on the lockfile to avoid collisions
my $lockfilename = $sourcefilename.'.lock';
$newfilename = $sourcefilename.'.processing';
if (! defined($xmlfile)) {
open(LOCKFILE,'>'.$lockfilename);
if (!flock(LOCKFILE,LOCK_EX|LOCK_NB)) {
warn("Unable to lock $lockfilename. Aborting".$/);
        # don't call clean_up_and_exit; another instance is running and
        # we don't want to 'cleanup' their files
exit 6;
}
if (! -e $newfilename && -e $sourcefilename) {
$logthis->('renaming '.$sourcefilename.' to '.$newfilename);
rename($sourcefilename,$newfilename);
        copy($newfilename,$newfilename.'.'.time) if ($backup);
$logthis->("renamed $sourcefilename to $newfilename");
} elsif (! -e $newfilename) {
utime(undef,undef,$newfilename);
}
}
##
## Table definitions
##
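#
# The activity table holds one row per parsed log entry and references the
# student, resource (res), and machine lookup tables by integer id, so the
# (potentially long) student, URL, and hostname strings are stored only once.
#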
my %tables = &table_names($course,$domain);
my $student_table_def =
{ id => $tables{'student'},
permanent => 'no',
columns => [
{ name => 'student_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',
auto_inc => 'yes', },
{ name => 'student',
type => 'VARCHAR(100) BINARY',
restrictions => 'NOT NULL', },
],
'PRIMARY KEY' => ['student_id',],
};
my $res_table_def =
{ id => $tables{'res'},
permanent => 'no',
columns => [{ name => 'res_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',
auto_inc => 'yes', },
{ name => 'resource',
type => 'MEDIUMTEXT',
restrictions => 'NOT NULL'},
],
'PRIMARY KEY' => ['res_id'],
};
#my $action_table_def =
#{ id => $action_table,
# permanent => 'no',
# columns => [{ name => 'action_id',
# type => 'MEDIUMINT UNSIGNED',
# restrictions => 'NOT NULL',
# auto_inc => 'yes', },
# { name => 'action',
# type => 'VARCHAR(100)',
# restrictions => 'NOT NULL'},
# ],
# 'PRIMARY KEY' => ['action_id',],
#};
my $machine_table_def =
{ id => $tables{'machine'},
permanent => 'no',
columns => [{ name => 'machine_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',
auto_inc => 'yes', },
{ name => 'machine',
type => 'VARCHAR(100)',
restrictions => 'NOT NULL'},
],
'PRIMARY KEY' => ['machine_id',],
};
my $activity_table_def =
{ id => $tables{'activity'},
permanent => 'no',
columns => [
{ name => 'res_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',},
{ name => 'time',
type => 'DATETIME',
restrictions => 'NOT NULL',},
{ name => 'student_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',},
{ name => 'action',
type => 'VARCHAR(10)',
restrictions => 'NOT NULL',},
{ name => 'idx', # This is here in case a student
type => 'MEDIUMINT UNSIGNED', # has multiple submissions during
restrictions => 'NOT NULL', # one second. It happens, trust
auto_inc => 'yes', }, # me.
{ name => 'machine_id',
type => 'MEDIUMINT UNSIGNED',
restrictions => 'NOT NULL',},
{ name => 'action_values',
type => 'MEDIUMTEXT', },
],
'PRIMARY KEY' => ['time','student_id','res_id','idx'],
'KEY' => [{columns => ['student_id']},
{columns => ['time']},],
};
my @Activity_Table = ($activity_table_def);
my @ID_Tables = ($student_table_def,$res_table_def,$machine_table_def);
##
## End of table definitions
##
$logthis->('tables = '.join(',',keys(%tables)));
$logthis->('Connecting to mysql');
&Apache::lonmysql::set_mysql_user_and_password('www',
$perlvar{'lonSqlAccess'});
if (!&Apache::lonmysql::verify_sql_connection()) {
warn "Unable to connect to MySQL database.";
$logthis->("Unable to connect to MySQL database.");
&clean_up_and_exit(3);
}
$logthis->('SQL connection is up');
&update_process_name($course.'@'.$domain." loading existing data");
my $missing_table = &check_for_missing_tables(values(%tables));
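#
# If any course-specific table is missing, try to restore from a previous
# backup: the gzipped XML dump is preferred when present, otherwise the
# older gzipped mysqldump file is used.  Existing tables newer than the
# backup are saved to a '.save_<time>' file first so they are not lost.
#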
if (-s $gz_sql_filename && ! -s $gz_xml_filename) {
my $backup_modification_time = (stat($gz_sql_filename))[9];
$logthis->($gz_sql_filename.' was last modified '.
localtime($backup_modification_time).
'('.$backup_modification_time.')');
if ($missing_table) {
# If the backup happened prior to the last table modification,
# we need to save the tables.
if (&latest_table_modification_time() > $backup_modification_time) {
# Save the current tables in case we need them another time.
$logthis->('Backing existing tables up');
&backup_tables_as_xml($gz_xml_filename.'.save_'.time,\%tables);
}
$time_this->();
&load_backup_sql_tables($gz_sql_filename);
&backup_tables_as_xml($gz_xml_filename,\%tables);
$time_this->('load backup tables');
}
} elsif (-s $gz_xml_filename) {
my $backup_modification_time = (stat($gz_xml_filename))[9];
$logthis->($gz_xml_filename.' was last modified '.
localtime($backup_modification_time).
'('.$backup_modification_time.')');
if ($missing_table) {
my $table_modification_time = $backup_modification_time;
# If the backup happened prior to the last table modification,
# we need to save the tables.
if (&latest_table_modification_time() > $backup_modification_time) {
# Save the current tables in case we need them another time.
$logthis->('Backing existing tables up');
&backup_tables_as_xml($gz_xml_filename.'.save_'.time,\%tables);
}
$time_this->();
# We have to make our own tables for the xml format
&drop_tables();
&create_tables();
&load_backup_xml_tables($gz_xml_filename,\%tables);
$time_this->('load backup tables');
}
}
if (defined($xmlfile)) {
&clean_up_and_exit(0);
}
##
## Ensure the tables we need exist
# create_tables does not complain if the tables already exist
$logthis->('creating tables');
if (! &create_tables()) {
warn "Unable to create tables";
$logthis->('Unable to create tables');
&clean_up_and_exit(4);
}
##
## Read the ids used for various tables
$logthis->('reading id tables');
&read_id_tables();
$logthis->('finished reading id tables');
##
## Set up the errors file
my $error_fh = IO::File->new(">>$error_filename");
##
## Parse the course log
$logthis->('processing course log');
&update_process_name($course.'@'.$domain." processing new data");
if (-s $newfilename) {
my $result = &process_courselog($newfilename,$error_fh,\%tables);
&update_process_name($course.'@'.$domain." backing up new data");
if (! defined($result)) {
# Something went wrong along the way...
$logthis->('process_courselog returned undef');
&clean_up_and_exit(5);
} elsif ($result > 0) {
$time_this->();
$logthis->('process_courselog returned '.$result.'.'.$/.
'Backing up tables');
&backup_tables_as_xml($gz_xml_filename,\%tables);
$time_this->('write backup tables');
}
if ($drop_when_done) { &drop_tables(); $logthis->('dropped tables'); }
}
close($error_fh);
##
## Clean up the filesystem
&Apache::lonmysql::disconnect_from_db();
unlink($newfilename) if (-e $newfilename && ! $nocleanup);
##
## Print timing data
$logthis->('printing timing data');
if ($time_run) {
my $elapsed_time = Time::HiRes::time - $initial_time;
print "Overall time: ".$elapsed_time.$/;
print &outputtimes();
$logthis->("Overall time: ".$elapsed_time);
$logthis->(&outputtimes());
}
&clean_up_and_exit(0);
########################################################
########################################################
sub clean_up_and_exit {
my ($exit_code) = @_;
# Close files
close(LOCKFILE);
close(LOGFILE);
# Remove zero length files
foreach my $file ($lockfilename, $error_filename,$logfile) {
if (defined($file) && -z $file) {
unlink($file);
}
}
exit $exit_code;
}
########################################################
########################################################
sub table_names {
my ($course,$domain) = @_;
my $prefix = $course.'_'.$domain.'_';
#
my %tables =
( student =>&Apache::lonmysql::fix_table_name($prefix.'students'),
res =>&Apache::lonmysql::fix_table_name($prefix.'resource'),
machine =>&Apache::lonmysql::fix_table_name($prefix.'machine_table'),
activity=>&Apache::lonmysql::fix_table_name($prefix.'activity'),
);
return %tables;
}
########################################################
########################################################
##
## Process Course Log
##
########################################################
########################################################
#
# Returns the number of lines in the activity.log file that were processed,
# or undef if the activity log (or the unprocessed-chunks file) could not be
# opened.
sub process_courselog {
my ($inputfile,$error_fh,$tables) = @_;
if (! open(IN,$inputfile)) {
warn "Unable to open '$inputfile' for reading";
$logthis->("Unable to open '$inputfile' for reading");
return undef;
}
my ($linecount,$insertcount);
my $dbh = &Apache::lonmysql::get_dbh();
#
&store_entry();
while (my $line=<IN>){
# last if ($linecount > 1000);
#
# Bulk storage variables
$time_this->();
chomp($line);
$linecount++;
# print $linecount++.$/;
my ($timestamp,$host,$log)=split(/\:/,$line,3);
#
# $log has the actual log entries; currently still escaped, and
# %26(timestamp)%3a(url)%3a(user)%3a(domain)
# then additionally
# %3aPOST%3a(name)%3d(value)%3a(name)%3d(value)
# or
# %3aCSTORE%3a(name)%3d(value)%26(name)%3d(value)
# or
# %3aPUTSTORE%3a(name)%3d(value)%26(name)%3d(value)
# or
# %3aEXPORT%3a(name)%3d(value)%26(name)%3d(value)
#
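        # As an illustration (hypothetical values), an unescaped chunk might
        # look like:
        #   1732172762:/res/msu/phys101/problem1.problem:sparty:msu:CSTORE:resource.0.tries=1&resource.0.award=EXACT_ANS
        # which splits on ':' into time, resource URL, username, domain,
        # action, and the action's name=value data.
        #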
# get delimiter between timestamped entries to be &&&
$log=~s/\%26(\d{9,10})\%3a/\&\&\&$1\%3a/g;
$log = &unescape($log);
# now go over all log entries
if (! defined($host)) { $host = 'unknown'; }
my $prevchunk = 'none';
foreach my $chunk (split(/\&\&\&/,$log)) {
if (length($chunk) > 20000) {
# avoid putting too much data into the database
# (usually an uploaded file or something similar)
if (! &savechunk(\$chunk,$timestamp,$host)) {
close(IN);
return undef;
}
next;
}
my $warningflag = '';
my ($time,$res,$uname,$udom,$action,@values)= split(/:/,$chunk);
#
if (! defined($res) || $res =~ /^\s*$/) {
$res = '/adm/roles';
$action = 'LOGIN';
}
if ($res =~ m|^/prtspool/|) {
$res = '/prtspool/';
}
if (! defined($action) || $action eq '') {
$action = 'VIEW';
}
if ($action !~ /^(LOGIN|VIEW|POST|CSTORE|STORE|PUTSTORE|EXPORT)$/) {
$warningflag .= 'action';
print $error_fh 'full log entry:'.$log.$/;
print $error_fh 'error on chunk (saving)'.$/;
if (! &savechunk(\$chunk,$timestamp,$host)) {
close(IN);
return undef;
}
$logthis->('(action) Unable to parse chunk'.$/.
'got '.
'time = '.$time.$/.
'res = '.$res.$/.
'uname= '.$uname.$/.
'udom = '.$udom.$/.
'action='.$action.$/.
'@values = '.join('&',@values));
next; #skip it if we cannot understand what is happening.
}
#
my %data = (student => $uname.':'.$udom,
resource => $res,
machine => $host,
action => $action,
time => &Apache::lonmysql::sqltime($time));
if ($action eq 'POST') {
$data{'action_values'} =
$dbh->quote(join('&',map { &escape($_); } @values));
} else {
$data{'action_values'} = $dbh->quote(join('&',@values));
}
my $error = &store_entry($dbh,$tables,\%data);
if ($error) {
$logthis->('error store_entry:'.$error." on %data");
}
$prevchunk = $chunk;
}
}
my $result = &store_entry($dbh,$tables);
if (! defined($result)) {
my $error = &Apache::lonmysql::get_error();
warn "Error occured during insert.".$error;
$logthis->('error = '.$error);
}
close IN;
return $linecount;
##
##
sub savechunk {
my ($chunkref,$timestamp,$host) = @_;
my $chunk = &escape(${$chunkref});
if (! open(CHUNKFILE,">>$chunk_filename") ||
! print CHUNKFILE $timestamp.':'.$host.':'.$chunk.$/) {
# abort
close(CHUNKFILE);
return 0;
}
close(CHUNKFILE);
return 1;
}
}
##
## default value for $logthis and $time_this
sub nothing {
return;
}
##
## Logging routine (look for $log)
##
sub log_to_file {
my ($input)=@_;
print LOGFILE $input.$/;
}
##
## Timing routines
##
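#
# $time_this->() (with no argument) records a start time; a subsequent
# $time_this->('label') adds the elapsed time since then to the 'label'
# bucket reported by &outputtimes().  Both are no-ops unless -timerun is set.
#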
{
my %Timing;
my $starttime;
sub time_action {
my ($key) = @_;
if (defined($key)) {
$Timing{$key}+=Time::HiRes::time-$starttime;
$Timing{'count_'.$key}++;
}
$starttime = Time::HiRes::time;
}
sub outputtimes {
my $Str;
if ($time_run) {
$Str = "Timing Data:".$/;
while (my($k,$v) = each(%Timing)) {
next if ($k =~ /^count_/);
my $count = $Timing{'count_'.$k};
$Str .=
' '.sprintf("%25.25s",$k).
' '.sprintf('% 8d',$count).
' '.sprintf('%12.5f',$v).$/;
}
}
return $Str;
}
}
sub latest_table_modification_time {
my $latest_time;
foreach my $table (@Activity_Table,@ID_Tables) {
my %tabledata = &Apache::lonmysql::table_information($table->{'id'});
next if (! scalar(keys(%tabledata))); # table does not exist
if (! defined($latest_time) ||
$latest_time < $tabledata{'Update_time'}) {
$latest_time = $tabledata{'Update_time'};
}
}
return $latest_time;
}
sub check_for_missing_tables {
my @wanted_tables = @_;
# Check for missing tables
my @Current_Tables = &Apache::lonmysql::tables_in_db();
my %Found;
foreach my $tablename (@Current_Tables) {
foreach my $table (@wanted_tables) {
if ($tablename eq $table) {
$Found{$tablename}++;
}
}
}
$logthis->('Found tables '.join(',',keys(%Found)));
my $missing_a_table = 0;
foreach my $table (@wanted_tables) {
if (! $Found{$table}) {
$logthis->('Missing table '.$table);
$missing_a_table = 1;
last;
}
}
return $missing_a_table;
}
##
## Use mysqldump to store backups of the tables
##
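##
## The generated command looks roughly like this (table names illustrative):
##   mysqldump --quote-names --opt loncapa 123456abcdef_msu_students ... \
##     | gzip > .../activity.log.sql.gz
##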
sub backup_tables_as_sql {
my ($gz_sql_filename) = @_;
my $command = qq{mysqldump --quote-names --opt loncapa };
foreach my $table (@ID_Tables,@Activity_Table) {
my $tablename = $table->{'id'};
$tablename =~ s/\`//g;
$command .= $tablename.' ';
}
$command .= '| gzip >'.$gz_sql_filename;
$logthis->($command);
system($command);
}
##
## Load in mysqldumped files
##
sub load_backup_sql_tables {
my ($gz_sql_filename) = @_;
if (-s $gz_sql_filename) {
$logthis->('loading data from gzipped sql file');
my $command='gzip -dc '.$gz_sql_filename.' | mysql --database=loncapa';
system($command);
        $logthis->('finished loading gzipped data');
} else {
return undef;
}
}
##
##
##
sub update_process_name {
my ($text) = @_;
$0 = 'parse_activity_log.pl: '.$text;
}
sub get_filename {
my ($course,$domain) = @_;
my ($a,$b,$c,undef) = split('',$course,4);
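    # For example (hypothetical ids), course '123456abcdef' in domain 'msu'
    # maps to $perlvar{'lonUsersDir'}/msu/1/2/3/123456abcdef/activity.log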
return "$perlvar{'lonUsersDir'}/$domain/$a/$b/$c/$course/activity.log";
}
sub create_tables {
foreach my $table (@ID_Tables,@Activity_Table) {
my $table_id = &Apache::lonmysql::create_table($table);
if (! defined($table_id)) {
warn "Unable to create table ".$table->{'id'}.$/;
$logthis->('Unable to create table '.$table->{'id'});
$logthis->(join($/,&Apache::lonmysql::build_table_creation_request($table)));
return 0;
}
}
return 1;
}
sub drop_tables {
foreach my $table (@ID_Tables,@Activity_Table) {
my $table_id = $table->{'id'};
&Apache::lonmysql::drop_table($table_id);
}
}
#################################################################
#################################################################
##
## Database item id code
##
#################################################################
#################################################################
{ # Scoping for ID lookup code
my %IDs;
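    # %IDs caches value => id mappings for each lookup table so that
    # &get_id() only has to hit the database for values it has not seen.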
sub read_id_tables {
foreach my $table (@ID_Tables) {
my @Data = &Apache::lonmysql::get_rows($table->{'id'});
my $count = 0;
foreach my $row (@Data) {
$IDs{$table->{'id'}}->{$row->[1]} = $row->[0];
}
}
return;
}
sub get_id {
my ($table,$fieldname,$value) = @_;
if (exists($IDs{$table}->{$value}) && $IDs{$table}->{$value} =~ /^\d+$/) {
return $IDs{$table}->{$value};
} else {
# insert into the table - if the item already exists, that is
# okay.
my $result = &Apache::lonmysql::store_row($table,[undef,$value]);
if (! defined($result)) {
warn("Got error on id insert for $value\n".
&Apache::lonmysql::get_error());
}
# get the id
my $id = &Apache::lonmysql::get_dbh()->{'mysql_insertid'};
if (defined($id)) {
$IDs{$table}->{$value}=$id;
} else {
$logthis->("Unable to retrieve id for $table $fieldname $value");
return undef;
}
}
}
} # End of ID scoping
###############################################################
###############################################################
##
## Save as XML
##
###############################################################
###############################################################
sub backup_tables_as_xml {
my ($filename,$tables) = @_;
open(XMLFILE,"|gzip - > $filename") || return ('error:unable to write '.$filename);
my $query = qq{
SELECT B.resource,
A.time,
A.idx,
C.student,
A.action,
E.machine,
A.action_values
FROM $tables->{'activity'} AS A
LEFT JOIN $tables->{'res'} AS B ON B.res_id=A.res_id
LEFT JOIN $tables->{'student'} AS C ON C.student_id=A.student_id
LEFT JOIN $tables->{'machine'} AS E ON E.machine_id=A.machine_id
ORDER BY A.time DESC
};
$query =~ s/\s+/ /g;
my $dbh = &Apache::lonmysql::get_dbh();
my $sth = $dbh->prepare($query);
if (! $sth->execute()) {
$logthis->('<font color="blue">'.
'WARNING: Could not retrieve from database:'.
$sth->errstr().'</font>');
return undef;
} else {
my ($res,$sqltime,$idx,$student,$action,$machine,$action_values);
if ($sth->bind_columns(\$res,\$sqltime,\$idx,\$student,\$action,
\$machine,\$action_values)) {
while ($sth->fetch) {
print XMLFILE '<row>'.
qq{<resource>$res</resource>}.
qq{<time>$sqltime</time>}.
qq{<idx>$idx</idx>}.
qq{<student>$student</student>}.
qq{<action>$action</action>}.
qq{<machine>$machine</machine>}.
qq{<action_values>$action_values</action_values>}.
'</row>'.$/;
}
} else {
warn "Unable to bind to columns.\n";
return undef;
}
}
close XMLFILE;
return;
}
###############################################################
###############################################################
##
## load as xml
##
###############################################################
###############################################################
{
my @fields = ('resource','time',
'student','action','idx','machine','action_values');
my %ids = ();
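    #
    # Each line of the backup file is a single <row>...</row> element in the
    # tag order written by &backup_tables_as_xml() above; the regexp below
    # pulls the fields back out, and fresh integer ids are assigned as the
    # student/resource/machine values are encountered.
    #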
sub load_backup_xml_tables {
my ($filename,$tables) = @_;
my $dbh = &Apache::lonmysql::get_dbh();
    my $xmlfh;
    if (! open($xmlfh,"cat $filename | gzip -d - |")) {
        return ('error:unable to read '.$filename);
    }
#
%ids = (resource=> {"\0count"=>1},
student=> {"\0count"=>1},
machine=> {"\0count"=>1});
#
my %data;
while (my $inputline = <$xmlfh>) {
my ($resource,$time,undef,$student,$action,$machine,$action_values) =
($inputline =~ m{<row>
<resource>(.*)</resource>
<time>(.*)</time>
<idx>(.*)</idx>
<student>(.*)</student>
<action>(.*)</action>
<machine>(.*)</machine>
<action_values>(.*)</action_values>
</row>$
}x
);
my $resource_id = &xml_get_id('resource',$resource);
my $student_id = &xml_get_id('student',$student);
my $machine_id = &xml_get_id('machine',$machine);
&xml_store_activity_row(map { defined($_)?$dbh->quote($_):''
} ($resource_id,
$time,
$student_id,
$action,
'NULL',
$machine_id,
$action_values));
}
&xml_store_activity_row();
close($xmlfh);
# Store id tables
while (my ($id_name,$id_data) = each(%ids)) {
if ($id_name eq 'resource') { $id_name = 'res'; }
delete($id_data->{"\0count"});
&xml_store_id_table($id_name,$id_data);
}
return;
}
sub xml_get_id {
my ($table,$element) = @_;
if (! exists($ids{$table}->{$element})) {
$ids{$table}->{$element} = $ids{$table}->{"\0count"}++;
}
return $ids{$table}->{$element};
}
{
my @data_rows;
sub xml_store_activity_row {
my @data = @_;
if (scalar(@data)) {
push(@data_rows,[@data]);
}
        if ((! scalar(@data) && scalar(@data_rows)) || scalar(@data_rows) > 500) {
if (! &Apache::lonmysql::bulk_store_rows($tables{'activity'},
scalar(@{$data_rows[0]}),
\@data_rows)) {
$logthis->("Error:".&Apache::lonmysql::get_error());
warn("Error:".&Apache::lonmysql::get_error());
} else {
undef(@data_rows);
}
}
return;
}
}
sub xml_store_id_table {
my ($table,$tabledata) =@_;
my $dbh = &Apache::lonmysql::get_dbh();
if (! &Apache::lonmysql::bulk_store_rows
($tables{$table},2,
[map{[$tabledata->{$_},$dbh->quote($_)]} keys(%$tabledata)])) {
$logthis->("Error:".&Apache::lonmysql::get_error());
warn "Error:".&Apache::lonmysql::get_error().$/;
}
}
} # End of load xml scoping
#######################################################################
#######################################################################
##
## store_entry - accumulate data to be inserted into the database
##
## Pass no values in to clear accumulator
## Pass ($dbh,\%tables) to initiate storage of values
## Pass ($dbh,\%tables,\%data) to use normally
##
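##
## For example, as used in &process_courselog() above:
##   &store_entry();                     # reset the accumulator
##   &store_entry($dbh,$tables,\%data);  # queue one parsed log entry
##   &store_entry($dbh,$tables);         # flush any remaining queued rows
##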
#######################################################################
#######################################################################
{
my @rows;
sub store_entry {
my $max_row_count = 100;
if (! @_) {
undef(@rows);
return '';
}
my ($dbh,$tables,$data) = @_;
return if (! defined($tables));
if (defined($data)) {
my $error;
foreach my $field ('student','resource','action','time') {
if (! defined($data->{$field}) || $data->{$field} eq ':' ||
$data->{$field}=~ /^\s*$/) {
$error.=$field.',';
}
}
if ($error) { $error=~s/,$//; return $error; }
#
my $student_id = &get_id($tables->{'student'},'student',
$data->{'student'});
my $res_id = &get_id($tables->{'res'},
'resource',$data->{'resource'});
my $machine_id = &get_id($tables->{'machine'},
'machine',$data->{'machine'});
my $idx = $data->{'idx'}; if (! $idx) { $idx = "''"; }
#
push(@rows,[$res_id,
qq{'$data->{'time'}'},
$student_id,
qq{'$data->{'action'}'},
$idx,
$machine_id,
$data->{'action_values'}]);
}
if (defined($tables) &&
( (! defined($data) && scalar(@rows)) || scalar(@rows)>$max_row_count)
){
# Store the rows
my $result =
&Apache::lonmysql::bulk_store_rows($tables->{'activity'},
undef,
\@rows);
if (! defined($result)) {
my $error = &Apache::lonmysql::get_error();
warn "Error occured during insert.".$error;
return $error;
}
undef(@rows);
return $result if (! defined($data));
}
return '';
}
} # end of scope for &store_entry
###############################################################
###############################################################
##
## The usual suspects
##
###############################################################
###############################################################
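# For example, &escape('a:b') returns 'a%3ab' and &unescape('a%3ab') returns
# 'a:b'; these mirror the URL-style escaping used in the activity log.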
sub escape {
my $str=shift;
$str =~ s/(\W)/"%".unpack('H2',$1)/eg;
return $str;
}
sub unescape {
my $str=shift;
$str =~ s/%([a-fA-F0-9][a-fA-F0-9])/pack("C",hex($1))/eg;
return $str;
}