/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;

/**
 * Helper class for all utilities related to archival/retrieval of HFiles
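 * <p>
 * Illustrative usage (a minimal sketch; the table name and encoded region name below are
 * hypothetical):
 * <pre>{@code
 * Configuration conf = HBaseConfiguration.create();
 * TableName table = TableName.valueOf("example_table");
 * // archive directory for the whole table
 * Path tableArchive = HFileArchiveUtil.getTableArchivePath(conf, table);
 * // archive directory for a single region of that table
 * Path regionArchive = HFileArchiveUtil.getRegionArchiveDir(FSUtils.getRootDir(conf), table,
 *     "1234abcd1234abcd1234abcd1234abcd");
 * }</pre>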
 */
public class HFileArchiveUtil {
  private HFileArchiveUtil() {
    // non-external instantiation - util class
  }

  /**
   * Get the directory to archive a store directory
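   * <p>
   * A usage sketch (the table, encoded region, and family names below are hypothetical):
   * <pre>{@code
   * Configuration conf = HBaseConfiguration.create();
   * Path storeArchive = HFileArchiveUtil.getStoreArchivePath(conf,
   *     TableName.valueOf("example_table"), "1234abcd1234abcd1234abcd1234abcd", "cf");
   * }</pre>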
   * @param conf {@link Configuration} to read for the archive directory name
   * @param tableName table name under which the store currently lives
   * @param regionName region encoded name under which the store currently lives
   * @param familyName name of the family in the store
   * @return {@link Path} to the directory to archive the given store or
   *         <tt>null</tt> if it should not be archived
   * @throws IOException if an unexpected error occurs
   */
  public static Path getStoreArchivePath(final Configuration conf, final TableName tableName,
      final String regionName, final String familyName) throws IOException {
    Path tableArchiveDir = getTableArchivePath(conf, tableName);
    return HStore.getStoreHomedir(tableArchiveDir, regionName, Bytes.toBytes(familyName));
  }

  /**
   * Get the directory to archive a store directory
   * @param conf {@link Configuration} to read for the archive directory name
   * @param region parent region information under which the store currently lives
   * @param tabledir directory for the table under which the store currently lives
   * @param family name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   * @throws IOException if an unexpected error occurs
   */
  public static Path getStoreArchivePath(Configuration conf, HRegionInfo region, Path tabledir,
      byte[] family) throws IOException {
    Path rootDir = FSUtils.getRootDir(conf);
    Path tableArchiveDir = getTableArchivePath(rootDir, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }

  /**
   * Get the archive directory for a given region under the specified table
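   * <p>
   * A usage sketch (the root directory, table name, and region directory below are hypothetical):
   * <pre>{@code
   * Path rootDir = new Path("/hbase");
   * Path regiondir = new Path("/hbase/data/default/example_table/1234abcd1234abcd1234abcd1234abcd");
   * Path regionArchive = HFileArchiveUtil.getRegionArchiveDir(rootDir,
   *     TableName.valueOf("example_table"), regiondir);
   * }</pre>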
   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName the table name. Cannot be null.
   * @param regiondir the path to the region directory. Cannot be null.
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName, Path regiondir) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);

    // then add on the region path under the archive
    String encodedRegionName = regiondir.getName();
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }

  /**
   * Get the archive directory for a given region under the specified table
   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName name of the table to archive. Cannot be null.
   * @param encodedRegionName encoded name of the region to be archived. Cannot be null.
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName,
      String encodedRegionName) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }

  /**
   * Get the path to the table archive directory based on the configured archive directory.
   * <p>
   * Generally of the form: [hbase root directory]/[archive directory]/[table directory]
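   * <p>
   * A usage sketch (the root directory and table name below are hypothetical; the exact layout
   * under the archive directory follows {@link FSUtils#getTableDir(Path, TableName)}):
   * <pre>{@code
   * Path archiveTableDir = HFileArchiveUtil.getTableArchivePath(new Path("/hbase"),
   *     TableName.valueOf("example_table"));
   * }</pre>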
   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   */
  public static Path getTableArchivePath(final Path rootdir, final TableName tableName) {
    return FSUtils.getTableDir(getArchivePath(rootdir), tableName);
  }

  /**
   * Get the path to the table archive directory based on the configured archive directory.
   * <p>
   * Assumes that the table should already be archived.
   * @param conf {@link Configuration} to read the archive directory property. Cannot be null.
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   * @throws IOException if an unexpected error occurs
   */
  public static Path getTableArchivePath(final Configuration conf, final TableName tableName)
      throws IOException {
    return FSUtils.getTableDir(getArchivePath(conf), tableName);
  }

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}
   * @param conf {@link Configuration} to read the archive directory name and root directory from.
   *          Cannot be null. Note for testing: requires a FileSystem root directory to be
   *          specified.
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   * @throws IOException if an unexpected error occurs
   */
  public static Path getArchivePath(Configuration conf) throws IOException {
    return getArchivePath(FSUtils.getRootDir(conf));
  }

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}
   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   */
  private static Path getArchivePath(final Path rootdir) {
    return new Path(rootdir, HConstants.HFILE_ARCHIVE_DIRECTORY);
  }
}