Table of Contents
I. Implementing the hash bucket first
1.1 How the hash bucket is implemented
1.2 The two kinds of data a hash bucket usually stores: strings and integers
To tell apart strings built from the same characters (e.g. "abcd" vs. "acbd"), we multiply the running hash by a constant before adding each character (see string hashing algorithms for the background).
Here we multiply by 31 each time; a small standalone sketch of this idea follows the table of contents.
1.3 Hash bucket implementation: code + walkthrough
1.3.1 The two hash functors (int and string)
1.3.2 The hash bucket node (if a bucket grows very deep, consider hanging a red-black tree off it instead)
1.3.3 Hash table constructor and destructor
1.3.4 Hash table find and erase (erase works much like the destructor)
One small detail here: when erasing we must check whether the node being removed is the head of its bucket or somewhere further down, and handle the two cases separately
1.3.5 Hash table insert (on insert we check whether the total number of stored nodes exceeds the number of buckets; if it does, we expand the table, because overly deep buckets make lookup slow)
1.4 Testing the hash bucket
II. Wrapping unordered_map and unordered_set with functors
2.1 Wrap the key-extraction functors first
2.2 Add an iterator (the iterator stores a pointer to the table, the current node, and the index of the bucket that node lives in)
2.3 Adapt the hash bucket (add the iterator; so the iterator can reach the table's private members, we declare it a friend)
2.4 Internal wrapping of unordered_map and unordered_set
2.5 Adapting the const iterator
III. Full wrapper code (slightly faster than the library versions here, because (under the VS toolchain) the library's unordered_map and unordered_set maintain one extra pointer, kept up to date from the very first insertion)
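Before the code, here is a minimal standalone sketch of the "multiply by 31" string hash mentioned above (the function name string_hash31 is only for illustration; the table itself uses the HashFunc<string> functor defined in HashTable.h below). It shows why two strings made of the same characters, such as "abcd" and "acbd", still get different hash values: every earlier character is weighted by another factor of 31, so character order matters.
#include <iostream>
#include <string>

size_t string_hash31(const std::string& s)
{
    size_t hashi = 0;
    for (char ch : s)
    {
        // weight everything accumulated so far by 31, then add the next character
        hashi = hashi * 31 + ch;
    }
    return hashi;
}

int main()
{
    std::cout << string_hash31("abcd") << "\n";
    std::cout << string_hash31("acbd") << "\n";// a different value from the line above
    return 0;
}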
unordered_map.h
#pragma once
#include"HashTable.h"
namespace SF
{
template<class K,class V,class Hash = HashFunc<K>>
class unordered_map
{
struct KeyOfMap
{
const K& operator()(const pair<K, V>& kv)
{
return kv.first;
}
};
public:
//HashTable<K, pair<const K, V>, Hash, KeyOfMap>
typedef typename hash_bucket::HashTable<K, pair<const K, V>, Hash, KeyOfMap>::iterator iterator;
iterator begin()
{
return _ht.begin();
}
iterator end()
{
return _ht.end();
}
V& operator[](const K& key)
{
pair<iterator, bool> ret = _ht.Insert(make_pair(key, V()));
return ret.first->second;
}
// Note: there is no const overload of operator[]. operator[] may have to insert a
// default-constructed value, which cannot be done on a const table
// (std::unordered_map has no const operator[] either).
pair<iterator, bool> insert(const pair<K,V>& data)
{
return _ht.Insert(data);
}
iterator find(const K& key)
{
return _ht.Find(key);
}
bool erase(const K& key)
{
return _ht.Erase(key);
}
private:
hash_bucket::HashTable<K, pair<const K,V>, Hash, KeyOfMap> _ht;
};
void test_map()
{
unordered_map<string, string> dict;
dict.insert(make_pair("sort",""));
dict.insert(make_pair("string","ַ"));
dict.insert(make_pair("insert",""));
auto iter = dict.begin();
for (auto& kv : dict)
{
//kv.first += 'x';
kv.second += 'x';
cout << kv.first << ":" << kv.second << endl;
}
cout << endl;
string arr[] = { "苹果", "苹果","苹果", "苹果", "香蕉", "香蕉", "香蕉", "西瓜", "西瓜", "桂林米粉", "桂林米粉", "桂林米粉", "北京烤鸭" };
unordered_map<string, int> count_map;
for (auto& e : arr)
{
count_map[e]++;
}
for (auto& kv : count_map)
{
cout << kv.first << ":" << kv.second << endl;
}
cout << endl;
}
}
unordered_set.h
#pragma once
#include"HashTable.h"
namespace SF
{
template<class K,class Hash = HashFunc<K>>
class unordered_set
{
struct KeyOfSet
{
const K& operator()(const K& key)
{
return key;
}
};
public:
typedef typename hash_bucket::HashTable<K,K, Hash, KeyOfSet>::iterator iterator;
typedef typename hash_bucket::HashTable<K,K, Hash, KeyOfSet>::const_iterator const_iterator;
iterator begin() {
return iterator(_ht.begin());
}
iterator end() {
return iterator(_ht.end());
}
const_iterator begin()const
{
return const_iterator(_ht.begin());
}
const_iterator end() const
{
return const_iterator(_ht.end());
}
pair<const_iterator, bool> insert(const K& key)
{
auto ret = _ht.Insert(key);
return pair<const_iterator, bool>(const_iterator(ret.first._pht,
ret.first._node, ret.first._hashi),
ret.second);
}
iterator find(const K& key)// Find returns an iterator, not a bool
{
return _ht.Find(key);
}
bool erase(const K& key)
{
return _ht.Erase(key);
}
private:
hash_bucket::HashTable<K,K, HashFunc<K>, KeyOfSet> _ht;
};
void test_set()
{
unordered_set<int> us;
us.insert(5);
us.insert(15);
us.insert(52);
us.insert(3);
unordered_set<int>::iterator it = us.begin();
while (it != us.end())
{
//*it += 5;
cout << *it << " ";
++it;
}
cout << endl;
for (auto e : us)
{
cout << e << " ";
}
cout << endl;
}
}
HashTable.h
#pragma once
#include<iostream>
#include<vector>
#include<string>
using namespace std;
template<class K>
struct HashFunc
{
size_t operator()(const K& key)
{
return key;
}
};
template<>
struct HashFunc<string>// full specialization for string
{
size_t operator()(const string& kv)
{
size_t hashi = 0;
for (auto e : kv)
{
hashi *= 31;
hashi += e;
}
return hashi;
}
};
namespace hash_bucket
{
template<class T>
struct HashNode
{
T _data;
HashNode<T>* _next;
HashNode(const T& data)
:_data(data)
,_next(nullptr)
{}
};
template<class K,class T,class Hash,class KeyOfT>
class HashTable;// forward declaration: the iterator below needs to name the table
template<class K,class T,class Ptr,class Ref,class KeyOfT,class Hash>
struct _HTIterator
{
typedef HashNode<T> Node;
public:
// The iterator needs the current node, a pointer to the whole table (otherwise it
// cannot find the next bucket), and the index of the bucket the node lives in.
typedef _HTIterator<K, T, Ptr, Ref, KeyOfT, Hash> Self;
typedef _HTIterator<K, T, T*, T&, KeyOfT, Hash> iterator;
size_t _hashi;// index of the bucket this node lives in
Node* _node;
const HashTable<K, T, Hash, KeyOfT>* _pht;
_HTIterator(HashTable<K, T, Hash, KeyOfT>* pht,Node* node,size_t hashi)
:_pht(pht)
,_node(node)
,_hashi(hashi)
{}
_HTIterator(const HashTable<K, T, Hash, KeyOfT>* pht, Node* node, size_t hashi)
:_pht(pht)// taking a const table pointer avoids "widening" access rights when the iterator is built from a const table
, _node(node)
, _hashi(hashi)
{}
_HTIterator(const iterator& iter)// lets a const_iterator be built from an ordinary iterator (this is simply the copy constructor when Self is iterator)
:_pht(iter._pht)
, _node(iter._node)
, _hashi(iter._hashi)
{}
bool operator!=(const Self& s) const
{
return _node != s._node;
}
bool operator==(const Self& s) const
{
return _node == s._node;
}
Ptr operator->()
{
return &_node->_data;
}
Ref operator*()
{
return _node->_data;
}
Self& operator++()
{
if (_node->_next)
{
// the current bucket still has nodes: step to the next one
_node = _node->_next;
}
else
{
// the current bucket is exhausted: look for the start of the next non-empty bucket
//KeyOfT kot;
//Hash hf;
//size_t hashi = hf(kot(_node->_data)) % _pht._tables.size();
++_hashi;
while (_hashi < _pht->_tables.size())
{
if (_pht->_tables[_hashi])
{
_node = _pht->_tables[_hashi];
break;
}
++_hashi;
}
if (_hashi == _pht->_tables.size())
{
_node = nullptr;
}
}
return *this;
}
};
template<class K,class T,class Hash,class KeyOfT>
class HashTable
{
public:
typedef HashNode<T> Node;
// the iterator needs to reach the table's private _tables, so declare _HTIterator a friend
// (fresh template parameter names avoid shadowing the enclosing class's parameters)
template<class K1, class T1, class Ptr1, class Ref1, class KeyOfT1, class Hash1>
friend struct _HTIterator;
typedef _HTIterator<K, T, T*, T&, KeyOfT, Hash> iterator;
typedef _HTIterator<K, T,const T*, const T&, KeyOfT, Hash> const_iterator;
public:
iterator begin()
{
for (size_t i = 0; i < _tables.size(); i++)
{
if (_tables[i])
return iterator(this, _tables[i], i);
}
return end();
}
iterator end()
{
return iterator(this, nullptr, -1);
}
const_iterator begin() const
{
for (size_t i = 0; i < _tables.size(); i++)
{
if (_tables[i])
return const_iterator(this, _tables[i], i);
}
return end();
}
const_iterator end()const
{
return const_iterator(this, nullptr, -1);
}
HashTable()
{
_tables.resize(10, nullptr);
}
~HashTable()
{
for (size_t i = 0; i < _tables.size(); i++)
{
Node* cur = _tables[i];
while (cur)// free the whole chain in this bucket, not just the head node
{
Node* next = cur->_next;
delete cur;
cur = next;
}
_tables[i] = nullptr;
}
}
iterator Find(const K& key)
{
Hash hf;
KeyOfT kot;
size_t hashi = hf(key) % _tables.size();
Node* cur = _tables[hashi];
while (cur)
{
if (kot(cur->_data) == key)
{
return iterator(this, cur, hashi);
}
cur = cur->_next;
}
return end();
}
bool Erase(const K& key)
{
Hash hf;
KeyOfT kot;
size_t hashi = hf(key) % _tables.size();
Node* prev = nullptr;
Node* cur = _tables[hashi];
while (cur)
{
if (kot(cur->_data) == key)
{
if (prev == nullptr)
{
_tables[hashi] = cur->_next;
}
else
{
prev->_next = cur->_next;
}
delete cur;
--_n;// keep the element count in step with the load-factor check in Insert
return true;
}
prev = cur;
cur = cur->_next;
}
return false;
}
pair<iterator,bool> Insert(const T& data)
{
Hash hf;
KeyOfT kot;
iterator it = Find(kot(data));
if (it != end())
return make_pair(it,false);
if (_n == _tables.size())
{
HashTable<K, T, Hash,KeyOfT> newtable;
newtable._tables.resize(2 * _tables.size(), nullptr);
for (size_t i = 0; i < _tables.size(); i++)
{
Node* cur = _tables[i];
while (cur)
{
Node* next = cur->_next;// remember the rest of the old chain before relinking
size_t hashi = hf(kot(cur->_data)) % newtable._tables.size();
// move the node itself: head-insert it into its new bucket
cur->_next = newtable._tables[hashi];
newtable._tables[hashi] = cur;
cur = next;
}
_tables[i] = nullptr;
}
_tables.swap(newtable._tables);// steal the new bucket array; newtable is left with the emptied old one
}
_n++;
size_t hashi = hf(kot(data)) % _tables.size();
Node* newnode = new Node(data);
// head-insert the new node into its bucket
newnode->_next = _tables[hashi];
_tables[hashi] = newnode;
return make_pair(iterator(this,newnode,hashi), true);
}
void Some()
{
size_t bucketSize = 0;
size_t maxBucketLen = 0;
size_t sum = 0;
double averageBucketLen = 0;
for (size_t i = 0; i < _tables.size(); i++)
{
Node* cur = _tables[i];
if (cur)
{
++bucketSize;
}
size_t bucketLen = 0;
while (cur)
{
++bucketLen;
cur = cur->_next;
}
sum += bucketLen;
if (bucketLen > maxBucketLen)
{
maxBucketLen = bucketLen;
}
}
averageBucketLen = bucketSize ? (double)sum / (double)bucketSize : 0.0;// avoid dividing by zero on an empty table
printf("all bucketSize:%zu\n", _tables.size());
printf("bucketSize:%zu\n", bucketSize);
printf("maxBucketLen:%zu\n", maxBucketLen);
printf("averageBucketLen:%lf\n\n", averageBucketLen);
}
private:
size_t _n = 0;// number of stored elements, used for the load-factor check in Insert
vector<Node*> _tables;
};
}
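As a quick sanity check of the bucket table on its own, independent of the unordered_map/unordered_set wrappers, here is a minimal sketch that assumes only the HashTable.h above. IdentityKey is an illustration-only key extractor playing the same role as KeyOfSet (the stored value is the key itself). It inserts a few integers straight into hash_bucket::HashTable and prints the bucket statistics with Some(); build it as its own translation unit, since it defines its own main.
#include"HashTable.h"

// illustration-only key extractor: for a set-like table the stored value is the key
struct IdentityKey
{
    const int& operator()(const int& key)
    {
        return key;
    }
};

int main()
{
    hash_bucket::HashTable<int, int, HashFunc<int>, IdentityKey> ht;
    int arr[] = { 5, 15, 52, 3, 13, 23, 33 };
    for (int e : arr)
    {
        ht.Insert(e);// Insert returns pair<iterator, bool>; a duplicate key reports false
    }
    ht.Some();// number of used buckets, longest bucket, average bucket length
    return 0;
}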
Test code
#include"unorderset.h"
#include"unordermap.h"
int main()
{
SF::test_map();
SF::test_set();
return 0;
}