- Create directory test1
# Create the test1 directory (the original created 'test', but every
# following command — touch, cp, echo, cat, du — operates on 'test1').
mkdir test1
- Create file test1.txt inside the test1 directory.
# Create an empty file named test1.txt inside the test1 directory.
touch test1/test1.txt
- Create copy of folder test1 with name test2.
# Recursively copy the test1 directory (and its contents) to test2.
cp -R test1 test2
- Delete file test1.txt inside test2 directory.
# Delete the copied file inside the test2 directory.
rm test2/test1.txt
- Rename the test2 directory to directory_without_file.
# Rename test2 to directory_without_file. The original used absolute paths
# (/test2), but the directories were created relative to the current
# directory; it also wrote 'directory_without_files', which disagrees with
# the name stated in the task.
mv test2 directory_without_file
- Insert 'test1' text into test1/test1.txt file.
# Write 'test1' into test1/test1.txt. Relative path: the directory was
# created in the current directory, not at the filesystem root (/test1
# would also normally require root privileges).
echo "test1" > test1/test1.txt
- print the text from the test1/test1.txt file.
# Print the file contents (relative path, matching where the file was created).
cat test1/test1.txt
test1
- Insert 'test2' into the end of test1/test1.txt file.
# Append 'test2' to the end of the file (>> appends rather than overwriting).
# Relative path, matching where the file was created.
echo "test2" >> test1/test1.txt
- print the text from the test1/test1.txt file.
# Print the file contents again to confirm the append (relative path).
cat test1/test1.txt
test1
test2
- check the size of test1 directory
# Show the total size of the test1 directory: -s summarizes (one total),
# -h prints human-readable units. Relative path, matching where it was made.
du -sh test1
- Create the test3 directory and block all access to it for everyone.
# Create test3 and remove all permissions (no read/write/execute for
# owner, group, or others).
mkdir test3
chmod 000 test3
- Try to remove that directory.
# NOTE: this typically still succeeds even with mode 000 — removing a
# directory entry is governed by write permission on the PARENT directory,
# not by the permissions of the directory being removed.
rmdir test3
- Create a simple script which prints the current date, then try to execute it.
printf '#!/bin/bash\ndate\n' > date.sh && chmod +x date.sh && ./date.sh
- Count lines in the file test.txt.
# Count the number of lines in test.txt.
wc -l test.txt
- Show last 3 lines from the test.txt file.
# Show the last 3 lines of test.txt. The stray 'wc' token in the original
# made tail try to open a file literally named 'wc'.
tail -n 3 test.txt
- How many unique IP addresses accessed the website?
# Count distinct client IPs: field 1 is the client address in the common
# access-log format (presumably — verify against the actual log), sort
# groups duplicates so uniq can collapse them, wc -l counts the remainder.
awk '{print $1}' test.txt | sort | uniq | wc -l
- IP address with most requests.
# Busiest client: count requests per IP (uniq -c needs sorted input),
# sort counts numerically in descending order, keep the top entry.
awk '{print $1}' test.txt | sort | uniq -c | sort -nr | head -n 1
- Top 3 IP addresses by amount of POST requests.
# Top 3 IPs by number of POST requests. Two fixes vs the original:
# 1) 'awk ... test.txt' read the file directly, ignoring grep's filtered
#    stdin — so the POST filter had no effect; awk must read the pipe.
# 2) the task asks for the top 3, not head -n 1.
grep POST test.txt | awk '{print $1}' | sort | uniq -c | sort -nr | head -n 3
- Which IP addresses received 403 error ?
# IPs that received a 403 response. sort -u deduplicates so each address
# is listed once ("which IP addresses" asks for the set, not every hit).
grep ' 403 ' test.txt | awk '{print $1}' | sort -u
- Task with *: write a script to show which pages Googlebot requested from the website.
/robots.txt
/robots.txt
/ecjoy/17810tmrr2811359.htm
/robots.txt
/robots.txt
/economy/index.html
/casecamp/7039dcwb3api11-abwh-151-mbj9.htm
/robots.txt
/robots.txt
/ads.txt
/vitosaka/12219lspkds-1722307.htm
- Replace the IP address with the most requests with 127.0.0.1 in the test.txt file.
# Replace the IP with the most requests by 127.0.0.1. Instead of reading the
# top IP off the screen and hard-coding it (114.119.140.234), compute it and
# feed it to sed. Dots are escaped so sed matches the address literally.
ip=$(awk '{print $1}' test.txt | sort | uniq -c | sort -nr | head -n 1 | awk '{print $2}')
echo "top ip: $ip"
sed -i "s/${ip//./\\.}/127.0.0.1/g" test.txt