#
# Bacula Director configuration (bacula-dir.conf)
#
# Read by the Bacula Director daemon. Defines the Director itself, job
# templates, backup/restore jobs, filesets, schedules, clients, storage,
# the catalog database, message routing, and volume pools.
#
# NOTE(review): all daemon/client/catalog passwords are stored here in
# plaintext — restrict this file's permissions (root:bacula, mode 640)
# and consider rotating the credentials if the file has been shared.
#

#
# Director configuration
#
Director {                            # define myself
  Name = file1.mycompany.net-dir
  DIRport = 9101                      # where we listen for UA connections
  QueryFile = "/usr/local/share/bacula/query.sql"
  WorkingDirectory = "/var/db/bacula"
  PidDirectory = "/var/run"
  Maximum Concurrent Jobs = 20
  Password = "No32vQp0JxdXx4r4PgRus5vnhj4JTD1eDPrRRAsUzyUw"  # Console password
  Messages = Daemon
}

#
# Job definition templates
#
JobDefs {
  Name = "DefaultJob"
  Type = Backup
  Level = Incremental
  Client = file1.mycompany.net-fd
  FileSet = "OwnCloud"
  Schedule = "WeeklyCycle"
  Storage = File1
  Messages = Standard
  Pool = File
  Spool Data = no
  Spool Attributes = yes
  Priority = 10
  # %c expands to the client name, so each client gets its own bootstrap file
  Write Bootstrap = "/var/db/bacula/%c.bsr"
  # Retry a failed job automatically, up to twice, one hour apart
  Reschedule On Error = yes
  Reschedule Interval = 1 hour
  Reschedule Times = 2
}

#
# Backup jobs
#
Job {
  Name = "File1"
  JobDefs = "DefaultJob"
  Level = Full
  FileSet = "File1"
  Schedule = "WeeklyCycleAfterBackup"
  # Dump the catalog to bacula.sql before the backup, remove it afterwards;
  # the "File1" fileset below includes the dump so the catalog is backed up.
  RunBeforeJob = "/usr/local/share/bacula/make_catalog_backup.pl MyCatalog"
  RunAfterJob = "/bin/rm /usr/local/var/lib/bacula/bacula.sql"
  # %n expands to the job name
  Write Bootstrap = "/var/db/bacula/%n.bsr"
  Priority = 11                       # run after main backup
}

Job {
  Name = "BBM"
  Client = bbm.mycompany.net-fd
  JobDefs = "DefaultJob"
  # Dump the client's database before backup, clean up the dump afterwards
  ClientRunBeforeJob = "/usr/local/sbin/dump_db.sh"
  ClientRunAfterJob = "/usr/local/sbin/delete_db.sh"
}

Job {
  Name = "Cloud1"
  Client = cloud1.mycompany.net-fd
  JobDefs = "DefaultJob"
  ClientRunBeforeJob = "/usr/local/sbin/dump_db.sh"
  ClientRunAfterJob = "/usr/local/sbin/delete_db.sh"
}

Job {
  Name = "RestoreFiles"
  Type = Restore
  Client = file1.mycompany.net-fd
  FileSet = "OwnCloud"
  Storage = File1
  Pool = File
  Messages = Standard
  # Restored files land under this prefix instead of their original paths
  Where = /tmp/bacula-restores/
}

#
# Fileset definitions
#
FileSet {
  Name = "OwnCloud"
  Include {
    Options {
      signature = MD5
      compression = LZO
    }
    File = /root/
    File = /etc/
    File = /usr/local/
    File = /var/www/
    File = /data1/
    # Produced by the ClientRunBeforeJob database dump scripts
    File = /tmp/mysql.all-databases.sql.gz
  }
}

FileSet {
  Name = "File1"
  Include {
    Options {
      signature = MD5
      compression = LZO
    }
    # Catalog dump created by make_catalog_backup.pl in the "File1" job
    File = /usr/local/var/lib/bacula/bacula.sql
    File = /var/db/bacula/
    File = /usr/local/etc/
    File = /etc/
    File = /root/
  }
}

#
# Schedules
#
Schedule {
  Name = "WeeklyCycle"
  Run = Full sun at 00:00
  Run = Differential mon-sat at 00:00
}

Schedule {
  Name = "WeeklyCycleAfterBackup"
  # Used by the catalog-backup job so it runs after the nightly backups
  Run = Full sun-sat at 01:00
}

#
# Client definitions
#
Client {
  Name = file1.mycompany.net-fd
  Address = file1.mycompany.net
  FDPort = 9102
  Catalog = MyCatalog
  Password = "43Ugukt7sM9z8IYC3D0QhcssdYzseipF/qLpsn8YEJnE"
  File Retention = 14 days            # 14 days
  Job Retention = 14 days             # 14 days
  AutoPrune = yes                     # Prune expired Jobs/Files
}

Client {
  Name = bbm.mycompany.net-fd
  Address = bbm.mycompany.net
  FDPort = 9102
  Catalog = MyCatalog
  Password = "43Ugukt7sM9z8IYC3D0QhcssdYzseipF/qLpsn8YEJnE2"
  File Retention = 14 days
  Job Retention = 14 days
  AutoPrune = yes
}

Client {
  Name = cloud1.mycompany.net-fd
  Address = cloud1.mycompany.net
  FDPort = 9102
  Catalog = MyCatalog
  Password = "Fki1Lu/2eVq0tTbstY1uhYvSNgSEypw4MmHQJoyDwcKJ"
  File Retention = 14 days
  Job Retention = 14 days
  AutoPrune = yes
}

#
# Definition of file Virtual Autochanger device
#
Storage {
  Name = File1
  Address = file1.mycompany.net       # N.B. Use a fully qualified name here
  SDPort = 9103
  Password = "FNOqO5C3h9QGMnJhQtVSebJjabCQ5GjgEzwd8dKj9Cy4"
  Device = File1-Disk1
  Media Type = Disk
  Maximum Concurrent Jobs = 10
}

#
# Catalog service
#
Catalog {
  Name = MyCatalog
  dbname = "bacula"; dbuser = "bacula"; dbpassword = "qskjf8wjdjdxj4"
}

# Reasonable message delivery -- send most everything to email address
# and to the console
Messages {
  Name = Standard
  #
  # NOTE! If you send to two or more email addresses, you will need
  # to replace the %r in the from field (-f part) with a single valid
  # email address in both the mailcommand and the operatorcommand.
  # What this does is, it sets the email address that emails would display
  # in the FROM field, which is by default the same email as they're being
  # sent to. However, if you send email to more than one address, then
  # you'll have to set the FROM address manually, to a single address.
  # For example, a 'no-reply@mydomain.com' is better since that tends to
  # tell (most) people that it's coming from an automated source.
  #
  mailcommand = "/usr/local/sbin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula: %t %e of %c %l\" %r"
  operatorcommand = "/usr/local/sbin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula: Intervention needed for %j\" %r"
  mail = root@mycompany.nl = all, !skipped
  operator = root@mycompany.nl = mount
  console = all, !skipped, !saved
  #
  # WARNING! the following will create a file that you must cycle from
  # time to time as it will grow indefinitely. However, it will
  # also keep all your messages if they scroll off the console.
  #
  append = "/var/log/bacula.log" = all, !skipped
  catalog = all
}

#
# Message delivery for daemon messages (no job).
Messages {
  Name = Daemon
  mailcommand = "/usr/local/sbin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula daemon message\" %r"
  mail = root@mycompany.nl = all, !skipped
  console = all, !skipped, !saved
  append = "/var/log/bacula.log" = all, !skipped
}

# File Pool definition
Pool {
  Name = File
  Pool Type = Backup
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Action On Purge = Truncate
  Volume Retention = 14 days          # Max retention
  Maximum Volume Bytes = 50G          # Limit Volume size to something reasonable
  Maximum Volumes = 100               # Limit number of Volumes in Pool
  Label Format = "Vol-"               # Auto label
}

# Scratch pool definition
Pool {
  Name = Scratch
  Pool Type = Backup
}