需要先在 Linux 上创建一个普通用户 hadoop-cw,并为其设置密码
[root@cm1 ~]# useradd hadoop-cw
[root@cm1 ~]# passwd hadoop-cw
Changing password for user hadoop-cw.
New password:
BAD PASSWORD: The password is shorter than 8 characters
Retype new password:
passwd: all authentication tokens updated successfully.
然后在 HDFS 的 /user 目录下创建该用户的用户目录,并将属主改为 hadoop-cw
[root@cm1 ~]# sudo -u hdfs hdfs dfs -mkdir /user/hadoop-cw
[root@cm1 ~]# hdfs dfs -ls /user
Found 7 items
drwxr-xr-x - hdfs supergroup 0 2019-11-17 18:16 /user/hadoop-cw
drwx------ - hdfs supergroup 0 2019-11-15 21:36 /user/hdfs
drwxrwxrwx - mapred hadoop 0 2019-11-15 20:10 /user/history
drwxrwxr-t - hive hive 0 2019-11-15 20:10 /user/hive
drwxrwxr-x - hue hue 0 2019-11-15 20:11 /user/hue
drwxr-x--x - spark spark 0 2019-11-15 20:09 /user/spark
drwxr-xr-x - hdfs supergroup 0 2019-11-15 20:09 /user/yarn
[root@cm1 ~]# sudo -u hdfs hdfs dfs -chown hadoop-cw:hdfs /user/hadoop-cw
[root@cm1 ~]# hdfs dfs -ls /user
Found 7 items
drwxr-xr-x - hadoop-cw hdfs 0 2019-11-17 18:16 /user/hadoop-cw
drwx------ - hdfs supergroup 0 2019-11-15 21:36 /user/hdfs
drwxrwxrwx - mapred hadoop 0 2019-11-15 20:10 /user/history
drwxrwxr-t - hive hive 0 2019-11-15 20:10 /user/hive
drwxrwxr-x - hue hue 0 2019-11-15 20:11 /user/hue
drwxr-x--x - spark spark 0 2019-11-15 20:09 /user/spark
drwxr-xr-x - hdfs supergroup 0 2019-11-15 20:09 /user/yarn
上传文件测试,验证该目录可正常写入(注意:此处以 root 执行 put,文件属主显示为 hdfs;若要验证新用户权限,应使用 sudo -u hadoop-cw 执行)
[root@cm1 ~]# hdfs dfs -put 1.txt /user/hadoop-cw
[root@cm1 ~]# hdfs dfs -ls /user/hadoop-cw
Found 1 items
-rw-r--r-- 3 hdfs hdfs 12 2019-11-17 18:23 /user/hadoop-cw/1.txt