Zare NAS Servers

I put together video storage for Cathedral Of Faith, where I also volunteer as a camera operator.

Original Zare NAS setup (2010)

zare4

mdadm.conf

# mdadm.conf
CREATE owner=root group=disk mode=0660 auto=yes
HOMEHOST <system>
MAILADDR root
ARRAY /dev/md/0 metadata=1.2 UUID=7290e777:ec536a62:2c14867c:65f03fcb name=zare4:0
ARRAY /dev/md/1 metadata=1.2 UUID=99f19493:fcd2742f:67f4594d:1d266851 name=zare4:1
ARRAY /dev/md/2 metadata=1.2 UUID=5496c365:497b6097:2dfe14ca:a83c0836 name=zare4:2

/proc/mdstat

root@zare4:/etc# cat /proc/mdstat
Personalities : [raid6] [raid5] [raid4] [linear] [multipath] [raid0] [raid1] [raid10] 
md0 : active raid6 sdc[2] sde[1] sdb[0] sdd[3] sdh[6] sdi[7] sdg[5] sdf[4]
      17581592448 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]
      
md2 : active raid6 sdy[7] sdr[0] sdu[3] sdw[5] sdv[4] sdx[6] sds[1] sdt[2]
      35162348160 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]
      
md1 : active raid6 sdq[7] sdm[3] sdk[1] sdo[6] sdp[5] sdl[2] sdn[4] sdj[0]
      17581592448 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]

zare5

Creation of md2, fs3

root@zare5:~# mdadm --create /dev/md2 --chunk=64 --level=6 --raid-devices=8 /dev/sdr /dev/sds /dev/sdt /dev/sdu /dev/sdv /dev/sdw /dev/sdx /dev/sdy

root@zare5:~# mkfs.xfs -d sunit=128,swidth=768 -f /dev/md2
meta-data=/dev/md2               isize=512    agcount=55, agsize=268435440 blks
         =                       sectsz=4096  attr=2, projid32bit=1
         =                       crc=1        finobt=1, sparse=0
data     =                       bsize=4096   blocks=14649458688, imaxpct=1
         =                       sunit=16     swidth=96 blks
naming   =version 2              bsize=4096   ascii-ci=0 ftype=1
log      =internal log           bsize=4096   blocks=521728, version=2
         =                       sectsz=4096  sunit=1 blks, lazy-count=1
realtime =none                   extsz=4096   blocks=0, rtextents=0
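
The sunit/swidth values passed to mkfs.xfs follow from the array geometry: mkfs.xfs takes them in 512-byte sectors, the chunk size is 64 KiB, and RAID6 across 8 drives leaves 6 data drives per stripe. A quick check of the arithmetic:

# mkfs.xfs takes sunit/swidth in 512-byte sectors
echo $(( 64 * 1024 / 512 ))       # chunk of 64 KiB -> sunit = 128
echo $(( 6 * 64 * 1024 / 512 ))   # 8 drives - 2 parity = 6 data drives -> swidth = 768

In the mkfs output above these reappear converted to 4 KiB filesystem blocks: sunit=16 and swidth=96 blks.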

mdadm.conf

root@zare5:/etc# cat mdadm/mdadm.conf 
# auto-create devices with Debian standard permissions
CREATE owner=root group=disk mode=0660 auto=yes

# automatically tag new arrays as belonging to the local system
HOMEHOST <system>

# instruct the monitoring daemon where to send mail alerts
MAILADDR root

# definitions of existing MD arrays
ARRAY /dev/md/0 metadata=1.2 UUID=b7d27941:c794ebe6:8ee17e49:36839b35 name=zare5:0
ARRAY /dev/md/1 metadata=1.2 UUID=d78ab42d:0309f981:a1c345e7:77a699ca name=zare5:1
ARRAY /dev/md2 metadata=1.2 name=zare5:2 UUID=61ed4956:6127018f:ea4d9426:f7a06dd6
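
The md2 line differs in path style (/dev/md2 rather than /dev/md/2) and field order from the other two entries, presumably because it was appended from mdadm --detail --scan after the new array was created. A sketch of that step, assuming the config path shown above:

root@zare5:~# mdadm --detail --scan | grep md2 >> /etc/mdadm/mdadm.conf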

/proc/mdstat

root@zare5:/opt# cat /proc/mdstat
Personalities : [raid6] [raid5] [raid4] [linear] [multipath] [raid0] [raid1] [raid10] 
md2 : active raid6 sdy[7] sdx[6] sdw[5] sdv[4] sdu[3] sdt[2] sds[1] sdr[0]
      58597834752 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]
      [==>..................]  resync = 10.6% (1037657736/9766305792) finish=1058.5min speed=137427K/sec
      bitmap: 66/73 pages [264KB], 65536KB chunk

md0 : active raid6 sdd[2] sdi[7] sdc[8] sdb[0] sdg[9] sdf[4] sde[3] sdh[6]
      23441324160 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]
      
md1 : active raid6 sdj[0] sdo[9] sdn[4] sdp[6] sdk[8] sdq[7] sdm[2] sdl[3]
      23441324160 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU]
      
unused devices: <none>
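
md2 is still resyncing here, at 10.6% with roughly 17.5 hours to go at ~137 MB/s. Progress can be followed with, for example:

root@zare5:~# watch -n 60 cat /proc/mdstat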

/etc/samba/smb.conf

[global]
   workgroup = WORKGROUP
   server string = zare5 server
   dns proxy = no
   log file = /var/log/samba.log
   max log size = 10000
   syslog = 0
   panic action = /usr/share/samba/panic-action %d

   encrypt passwords = true
   passdb backend = tdbsam
   obey pam restrictions = yes
   unix password sync = yes

   passwd program = /usr/bin/passwd %u
   passwd chat = *Enter\snew\s*\spassword:* %n\n *Retype\snew\s*\spassword:* %n\n *password\supdated\ssuccessfully* .
   map to guest = bad user
   usershare allow guests = yes

[zshare5a-write]
  path = /opt/fs1/
  read only = No
  guest ok = No
  create mask = 0664
  directory mask = 0775

[zshare5a-read]
  path = /opt/fs1/
  force user = samba-anon
  force group = samba-anon
  read only = Yes
  guest ok = Yes
  browseable = Yes

[zshare5b-write]
  path = /opt/fs2/
  read only = No
  guest ok = No
  create mask = 0664
  directory mask = 0775

[zshare5b-read]
  path = /opt/fs2/
  force user = samba-anon
  force group = samba-anon
  read only = Yes
  guest ok = Yes
  browseable = Yes

[zshare5c-write]
  path = /opt/fs3/
  read only = No
  guest ok = No
  create mask = 0664
  directory mask = 0775

[zshare5c-read]
  path = /opt/fs3/
  force user = samba-anon
  force group = samba-anon
  read only = Yes
  guest ok = Yes
  browseable = Yes
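
The read-only shares force the samba-anon user and group, which must exist locally. A sketch of the supporting steps; the account-creation command is an assumption (only the samba-anon name comes from the config above), while testparm and smbclient are stock sanity checks:

root@zare5:~# adduser --system --group samba-anon   # assumed: create the guest account forced by the -read shares
root@zare5:~# testparm -s                           # validate smb.conf syntax
root@zare5:~# smbclient -L //zare5 -N               # anonymous listing to confirm guest access works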

mdadm --detail

/dev/md0:
        Version : 1.2
  Creation Time : Fri Jun  7 19:22:55 2013
     Raid Level : raid6
     Array Size : 23441324160 (22355.39 GiB 24003.92 GB)
  Used Dev Size : 3906887360 (3725.90 GiB 4000.65 GB)
   Raid Devices : 8
  Total Devices : 8
    Persistence : Superblock is persistent

    Update Time : Sun Apr 30 13:21:47 2017
          State : clean 
 Active Devices : 8
Working Devices : 8
 Failed Devices : 0
  Spare Devices : 0

         Layout : left-symmetric
     Chunk Size : 64K

           Name : zare5:0  (local to host zare5)
           UUID : b7d27941:c794ebe6:8ee17e49:36839b35
         Events : 3021

    Number   Major   Minor   RaidDevice State
       0       8       16        0      active sync   /dev/sdb
       8       8       32        1      active sync   /dev/sdc
       2       8       48        2      active sync   /dev/sdd
       3       8       64        3      active sync   /dev/sde
       4       8       80        4      active sync   /dev/sdf
       9       8       96        5      active sync   /dev/sdg
       6       8      112        6      active sync   /dev/sdh
       7       8      128        7      active sync   /dev/sdi
/dev/md1:
        Version : 1.2
  Creation Time : Fri Jun  7 19:58:22 2013
     Raid Level : raid6
     Array Size : 23441324160 (22355.39 GiB 24003.92 GB)
  Used Dev Size : 3906887360 (3725.90 GiB 4000.65 GB)
   Raid Devices : 8
  Total Devices : 8
    Persistence : Superblock is persistent

    Update Time : Sun Apr 30 13:22:22 2017
          State : clean 
 Active Devices : 8
Working Devices : 8
 Failed Devices : 0
  Spare Devices : 0

         Layout : left-symmetric
     Chunk Size : 64K

           Name : zare5:1  (local to host zare5)
           UUID : d78ab42d:0309f981:a1c345e7:77a699ca
         Events : 8090

    Number   Major   Minor   RaidDevice State
       0       8      144        0      active sync   /dev/sdj
       8       8      160        1      active sync   /dev/sdk
       2       8      192        2      active sync   /dev/sdm
       3       8      176        3      active sync   /dev/sdl
       4       8      208        4      active sync   /dev/sdn
       9       8      224        5      active sync   /dev/sdo
       6       8      240        6      active sync   /dev/sdp
       7      65        0        7      active sync   /dev/sdq
/dev/md2:
        Version : 1.2
  Creation Time : Sun Apr 30 11:33:45 2017
     Raid Level : raid6
     Array Size : 58597834752 (55883.25 GiB 60004.18 GB)
  Used Dev Size : 9766305792 (9313.88 GiB 10000.70 GB)
   Raid Devices : 8
  Total Devices : 8
    Persistence : Superblock is persistent

  Intent Bitmap : Internal

    Update Time : Sun Apr 30 14:39:51 2017
          State : active, resyncing 
 Active Devices : 8
Working Devices : 8
 Failed Devices : 0
  Spare Devices : 0

         Layout : left-symmetric
     Chunk Size : 64K

  Resync Status : 15% complete

           Name : zare5:2  (local to host zare5)
           UUID : 61ed4956:6127018f:ea4d9426:f7a06dd6
         Events : 2173

    Number   Major   Minor   RaidDevice State
       0      65       16        0      active sync   /dev/sdr
       1      65       32        1      active sync   /dev/sds
       2      65       48        2      active sync   /dev/sdt
       3      65       64        3      active sync   /dev/sdu
       4      65       80        4      active sync   /dev/sdv
       5      65       96        5      active sync   /dev/sdw
       6      65      112        6      active sync   /dev/sdx
       7      65      128        7      active sync   /dev/sdy
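
The mounts themselves aren't shown on this page; the "Creation of md2, fs3" heading pairs md2 with /opt/fs3, and the md0/md1 pairings below are inferred, as is using XFS for all three filesystems. Hypothetical fstab entries under those assumptions:

# hypothetical /etc/fstab entries; only the md2 -> fs3 pairing is stated above
/dev/md0  /opt/fs1  xfs  defaults  0  0
/dev/md1  /opt/fs2  xfs  defaults  0  0
/dev/md2  /opt/fs3  xfs  defaults  0  0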