#!/bin/bash
# start from here
#cp /topex/ftp/pub/SAF_models/GPS/Herring_vel_edited_072011.txt .
#*  Long.   Lat.   E & N Rate   E & N Adj.   E & N +-   RHO   H Rate   H adj.   +-   SITE
#*  (deg)   (deg)    (mm/yr)      (mm/yr)     (mm/yr)          (mm/yr)
#     1      2        3   4        5   6       7   8     9      10       11     12     13

# remove the header lines: sed's d command deletes every line containing "mm" (the unit rows)
sed '/mm/d' < Herring_vel_edited_072011.txt > tmp
# drop the leading character of each record and keep columns 2-130
cut -c2-130 tmp > Herring_vel_edited_072011.txt.tmp
mv Herring_vel_edited_072011.txt.tmp Herring_vel_edited_072011.txt

# window the region
# use the ve_tot.grd to be the region limit.
#awk '{printf("%8.3f %8.3f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f\n", $1-360,$2,$3,$4,$7,$8,$9,$10,$12)}' Herring_vel_edited_072011.txt > lonlat
#grdtrack lonlat -Gve_tot.grd | awk '{print $1,$2,$3,$4,$5,$6,$7,$8,$9}' > tmp1
#awk '{printf("%8.3f %8.3f %s\n", $1-360,$2,$13)}' Herring_vel_edited_072011.txt > lonlat
#grdtrack lonlat -Gve_tot.grd | awk '{print $3}' > tmp2
#awk '{printf("%8.3f %8.3f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f\n", $1-360,$2,$3,$4,$7,$8,$9,$10,$12)}' Herring_vel_edited_072011.txt > tmp1
# keep only sites inside the region bounds of ve_tot.grd; convert longitude to [-180,180]
awk '$1 < 245.995 && $1 > 235.604 && $2 > 31.045 && $2 < 40.336 {printf("%8.3f %8.3f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f\n", $1-360,$2,$3,$4,$7,$8,$9,$10,$12)}' Herring_vel_edited_072011.txt > tmp1
#awk '{print $13}' Herring_vel_edited_072011.txt > tmp2
awk '$1 < 245.995 && $1 > 235.604 && $2 > 31.045 && $2 < 40.336 {print $13}' Herring_vel_edited_072011.txt > tmp2
# keep the first four characters of the site name
cut -c1-4 tmp2 > tmp3
# tmp3 has the site id
# tmp1 has the following: lon, lat, Ve, Vn, Se, Sn, RHO, Vu, Su
# sort numerically by longitude (field 2)
paste tmp3 tmp1 | awk '{print $1,$2,$3,$4,$5,$6,$7,$8,$9,$10}' | sort -n -k2 > tmp
# remove repeated lines (uniq -f 1 skips field 1, the site id, when comparing)
uniq -f 1 tmp > gps.txt

# remove GR32 site as it looks strange
# remove 05SK site as it is too different from Rolandone's paper
grep -v GR32 gps.txt > tmp
grep -v 05SK tmp > tmp2
mv tmp2 gps.txt
rm tmp
# remove LB11 site as it is strange
grep -v LB11 gps.txt > tmp
mv tmp gps.txt

# add in the campaign sites from Rolandone's paper (Table S3); double the uncertainties
awk '{print $1,$2,$3,$4,$5,$6*2,$7*2,0,0,0}' table.S3 > tmp
grep -v BITT tmp > tmp2
cat gps.txt tmp2 > tmp3
mv tmp3 gps.txt
# add in the campaign sites in South Calif. from Crowell's thesis: swap the
# lat/lon and N/E column ordering to match gps.txt, add constant offsets to
# the velocities, and triple the uncertainties
awk 'NR>1 {print $1,$3,$2,$5+10.93,$4+8.33,$8*3,$7*3,0,0,0}' crowell_campaign_velocities.txt > tmp
cat gps.txt tmp > tmp2
mv tmp2 gps.txt

# remove 05WF near the Pacinas segment
grep -v 05WF gps.txt > tmp
mv tmp gps.txt
# remove MIDE GILL LIME MIDA near the Parkfield segment because they are too close to the fault
grep -v MIDE gps.txt | grep -v GILL | grep -v LIME | grep -v MIDA > tmp
mv tmp gps.txt
# remove P507 P496 near the Brawley seismic zone because they are strange
grep -v P507 gps.txt | grep -v P496 > tmp
mv tmp gps.txt

# gps.txt has the following: id, lon, lat, Ve, Vn, Se, Sn, RHO, Vu, Su
#awk '{print $2,$3,$4,$5,$6,$7,$8}' gps.txt > gps.dat
# the standard deviation of the GPS is at least 1.0 mm/yr
rm -f gps.dat
cat gps.txt | while read id lon lat ve vn se sn rho vu su
do
    # add constant offsets to Ve/Vn (reference-frame shift)
    vx=`echo $ve | awk '{print $1+15.7}'`
    vy=`echo $vn | awk '{print $1-17.3}'`
    # floor each uncertainty at 1.0 mm/yr
    if [ $(echo "$se > 1.0" | bc) -eq 1 ]
    then
        if [ $(echo "$sn > 1.0" | bc) -eq 1 ]
        then
            echo $lon $lat $vx $vy $se $sn $rho >> gps.dat
        else
            echo $lon $lat $vx $vy $se 1.0 $rho >> gps.dat
        fi
    else
        if [ $(echo "$sn > 1.0" | bc) -eq 1 ]
        then
            echo $lon $lat $vx $vy 1.0 $sn $rho >> gps.dat
        else
            echo $lon $lat $vx $vy 1.0 1.0 $rho >> gps.dat
        fi
    fi
done
rm -f tmp1 tmp2 tmp tmp3 lonlat

# rm postseismic slip from the Landers earthquake
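
# --- optional sanity check (a minimal sketch, not part of the original workflow) ---
# assumes gps.dat was just written by the loop above, so every record should
# have 7 fields (lon lat ve vn se sn rho) with both uncertainties floored at
# 1.0 mm/yr; flag any record that violates either assumption
awk 'NF != 7 || $5 < 1.0 || $6 < 1.0 {print "suspect record", NR":", $0}' gps.dat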