Operation Oriented Web Applications / Yokohama pm7
Upcoming SlideShare
Loading in...5
×
 

Operation Oriented Web Applications / Yokohama pm7

on

  • 3,547 views

 

Statistics

Views

Total Views
3,547
Views on SlideShare
3,105
Embed Views
442

Actions

Likes
2
Downloads
10
Comments
0

6 Embeds 442

http://blog.nomadscafe.jp 429
http://webcache.googleusercontent.com 5
url_unknown 4
http://www.slideshare.net 2
http://reader.freerss.net 1
http://slide.yoshiday.net 1

Accessibility

Categories

Upload Details

Uploaded via as Apple Keynote

Usage Rights

© All Rights Reserved

Report content

Flagged as inappropriate Flag as inappropriate
Flag as inappropriate

Select your reason for flagging this presentation as inappropriate.

Cancel
  • Full Name Full Name Comment goes here.
    Are you sure you want to
    Your message goes here
    Processing…
Post Comment
Edit your comment
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n
  • \n

Operation Oriented Web Applications / Yokohama pm7 Operation Oriented Web Applications / Yokohama pm7 Presentation Transcript

  • Operation Oriented Web Applications Yokohama.pm#7 @kazeburo
  • Log::Minimal
  • #!/usr/bin/env perl
    use strict;
    use warnings;
    use Log::Minimal;
    critf("%s","foo");
    warnf("%d %s", 1, "foo");
    sub hoge {
        infoff("foo");
        debugff("bar");
    };
    hoge();
    local $Log::Minimal::AUTODUMP = 1;
    infof({ key => val });
    warnf("data is %s", { key => val });
  • $ LM_DEBUG=1 perl /tmp/logminimal.pl
    2011-05-11T15:57:49 [CRITICAL] foo at /tmp/logminimal.pl line 7
    2011-05-11T15:57:49 [WARN] 1 foo at /tmp/logminimal.pl line 8
    2011-05-11T15:57:49 [INFO] foo at /tmp/logminimal.pl line 11 ,/tmp/logminimal.pl line 14
    2011-05-11T15:57:49 [DEBUG] bar at /tmp/logminimal.pl line 12 ,/tmp/logminimal.pl line 14
    2011-05-11T15:57:49 [INFO] {key => val} at /tmp/logminimal.pl line 17
    2011-05-11T15:57:49 [WARN] data is {key => val} at /tmp/logminimal.pl line 18
  • local $Log::Minimal::PRINT = sub {
        my ( $time, $type, $message, $trace) = @_;
        print STDERR "[$type] $message $trace";
    };
    local $Log::Minimal::LOG_LEVEL = "WARN";
    infof("foo"); #print nothing
    warnf("xaicron++");
  • local $Log::Minimal::AUTODUMP = 1;
    warnf("response => %s", [ 200, ['Content-Type','text/plain'], ['OK'] ]);
    # 2011-05-11T15:56:14 [WARN] response => [200,[Content-Type,text/plain],[OK]] at ..
    sub myerror {
        local $Log::Minimal::TRACE_LEVEL = 1;
        infof(@_);
    }
    myerror("foo");
  • use Log::Minimal;
    use Plack::Builder;
    builder {
        enable "Log::Minimal", autodump => 1;
        sub {
            my $env = shift;
            warnf("warn message");
            debugf("debug message");
            ...
        }
    };

    $ plackup -a demo.psgi
    HTTP::Server::PSGI: Accepting connections at http://0:5000/
    2011-05-11T16:32:24 [WARN] [/foo/bar/baz] warn message at /tmp/demo.psgi line 8
    2011-05-11T16:32:24 [DEBUG] [/foo/bar/baz] debug message at /tmp/demo.psgi line 9
  • DBIx::Sunny
  • selectrow_arrayref($query, {}, @bind);
    selectrow_hashref($query, {}, @bind);
    selectall_arrayref($query, { Slice => {} }, @bind);
    prepare($query) && execute(@bind)
  • use DBIx::Sunny;
    my $dbh = DBIx::Sunny->connect(...);

    use DBI;
    use DBIx::Sunny;
    my $dbh = DBI->connect(..., { RootClass => 'DBIx::Sunny' });
  • DBIx::Sunny::Schema
  • package NoNoPaste::Data;
    use parent qw/DBIx::Sunny::Schema/;
    __PACKAGE__->query(
        add_entry,
        id => Str,
        nick => { isa => Str, default => anonymouse },
        body => Str,
        q{INSERT INTO entries ( id, nick, body, ctime ) values ( ?, ?, ?, NOW() )},
    );
    __PACKAGE__->select_row(
        entry,
        id => Uint,
        q{SELECT id,nick,body,ctime FROM entries WHERE id =?};
    );
    __PACKAGE__->select_all(
        entries_multi,
        id => { isa => ArrayRef[Uint] },
        q{SELECT id,nick,body,ctime FROM entries WHERE id IN (?)}
    );
  • use parent qw/DBIx::Sunny::Schema/;
    __PACKAGE__-> ( , => / , [ => / ,[..]], ' ',);
  • prepare && execute && fetchrow_arrayref->[0];
    prepare && execute && fetchrow_hashref;
    prepare && execute && push @result, $_ while fetchrow_hash;
    prepare && execute
  • my $dbh = DBI->connect(...);
    my $master = NoNoPaste::Data->new( dbh => $dbh );
    # readonly query
    my $slave = NoNoPaste::Data->new( dbh => $dbh, readonly => 1 );
    #
    my $row = $master->add_entry(
        id => $id,
        nick => $nick,
        body => $body,
    );
    #
    my $rows = $slave->entry_list( offset => $offset );
  • GreenBuckets
  • use Digest::MurmurHash qw/murmur_hash/;for ( 1..100 ) { say murmur_hash(sprintf "test%03d", $_ );}
  • use Digest::MurmurHash qw/murmur_hash/;for ( 1..100 ) { say murmur_hash(sprintf "test%03d", $_ );}
  • CREATE TABLE objects (
        id INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
        fid INT UNSIGNED NOT NULL,
        bucket_id INT UNSIGNED NOT NULL,
        rid SMALLINT UNSIGNED NOT NULL,
        gid SMALLINT UNSIGNED NOT NULL,
        filename VARCHAR(1024),
        INDEX (fid, bucket_id),
        INDEX (bucket_id)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin

    SELECT * FROM objects WHERE fid = murmur($filename) AND bucket_id = $bucket AND filename = $filename
  • CREATE TABLE entries_int (
        id INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
        fid INT UNSIGNED NOT NULL,
        bid INT UNSIGNED NOT NULL,
        filename VARCHAR(255) NOT NULL,
        INDEX (fid, bid),
        INDEX (bid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin

    CREATE TABLE entries_char (
        id INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
        filename VARCHAR(255) NOT NULL,
        bid INT UNSIGNED NOT NULL,
        UNIQUE INDEX (filename, bid),
        INDEX (bid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin
  • .--------------+-----------+-----------+-----------.
    | table_name   | total_kb  | data_kb   | index_kb  |
    +--------------+-----------+-----------+-----------+
    | entries_char | 3360.0000 | 1552.0000 | 1808.0000 |
    | entries_int  | 2080.0000 | 1552.0000 |  528.0000 |
    '--------------+-----------+-----------+-----------'
    (filename 32 )
  • @nodes = sort {
        murmur_hash(join "/", $a->{node_id}, $bucket, $filename)
            <=>
        murmur_hash(join "/", $b->{node_id}, $bucket, $filename)
    } @nodes;