Lab 8 Skeleton Code
neil-kulkarni committed Mar 6, 2021
1 parent 9b67b51 commit aa255c2
Showing 17 changed files with 1,387 additions and 0 deletions.
49 changes: 49 additions & 0 deletions lab8/hashmap/Map61B.java
@@ -0,0 +1,49 @@
package hashmap;

import java.util.Set;

/**
 * Your implementation, hashmap.MyHashMap, should implement this interface. To do so,
 * append "implements hashmap.Map61B<K, V>" to the end of your "public class..."
 * declaration, though you can use other formal type parameters if you'd like.
 */
public interface Map61B<K, V> extends Iterable<K> {

    /** Removes all of the mappings from this map. */
    void clear();

    /** Returns true if this map contains a mapping for the specified key. */
    boolean containsKey(K key);

    /**
     * Returns the value to which the specified key is mapped, or null if this
     * map contains no mapping for the key.
     */
    V get(K key);

    /** Returns the number of key-value mappings in this map. */
    int size();

    /**
     * Associates the specified value with the specified key in this map.
     * If the map previously contained a mapping for the key,
     * the old value is replaced.
     */
    void put(K key, V value);

    /** Returns a Set view of the keys contained in this map. */
    Set<K> keySet();

    /**
     * Removes the mapping for the specified key from this map if present.
     * Not required for Lab 8. If you don't implement this, throw an
     * UnsupportedOperationException.
     */
    V remove(K key);

    /**
     * Removes the entry for the specified key only if it is currently mapped to
     * the specified value. Not required for Lab 8. If you don't implement this,
     * throw an UnsupportedOperationException.
     */
    V remove(K key, V value);
}
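
For orientation, here is a hedged usage sketch of this interface. The demo class, keys, and values are made up, and it assumes the MyHashMap skeleton below has been completed (including iterator()); it is not part of the lab files.

import hashmap.Map61B;
import hashmap.MyHashMapALBuckets;

public class Map61BDemo {
    public static void main(String[] args) {
        // Hypothetical usage with made-up values; requires a completed implementation.
        Map61B<String, Integer> m = new MyHashMapALBuckets<>();
        m.put("horse", 3);
        m.put("cat", 1);
        m.put("cat", 2);                           // old value 1 is replaced
        System.out.println(m.size());              // 2
        System.out.println(m.get("cat"));          // 2
        System.out.println(m.containsKey("dog"));  // false
        for (String key : m) {                     // Iterable<K>: iterates over the keys
            System.out.println(key);
        }
    }
}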

91 changes: 91 additions & 0 deletions lab8/hashmap/MyHashMap.java
@@ -0,0 +1,91 @@
package hashmap;

import java.util.Collection;

/**
 * A hash table-backed Map implementation. Provides amortized constant time
 * access to elements via get(), remove(), and put() in the best case.
 *
 * Assumes null keys will never be inserted, and does not resize down upon remove().
 * @author YOUR NAME HERE
 */
public class MyHashMap<K, V> implements Map61B<K, V> {

    /**
     * Protected helper class to store key/value pairs.
     * The protected qualifier allows subclass access.
     */
    protected class Node {
        K key;
        V value;

        Node(K k, V v) {
            key = k;
            value = v;
        }
    }

    /* Instance Variables */
    private Collection<Node>[] buckets;
    // You should probably define some more!

    /** Constructors */
    public MyHashMap() { }

    public MyHashMap(int initialSize) { }

    /**
     * MyHashMap constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMap(int initialSize, double maxLoad) { }

    /**
     * Returns a new node to be placed in a hash table bucket.
     */
    private Node createNode(K key, V value) {
        return null;
    }

    /**
     * Returns a data structure to be a hash table bucket.
     *
     * The only requirements of a hash table bucket are that we can:
     *  1. Insert items (`add` method)
     *  2. Remove items (`remove` method)
     *  3. Iterate through items (`iterator` method)
     *
     * Each of these methods is supported by java.util.Collection.
     * Most data structures in Java inherit from Collection, so we
     * can use almost any data structure as our buckets.
     *
     * Override this method to use different data structures as
     * the underlying bucket type.
     *
     * BE SURE TO CALL THIS FACTORY METHOD INSTEAD OF CREATING YOUR
     * OWN BUCKET DATA STRUCTURES WITH THE NEW OPERATOR!
     */
    protected Collection<Node> createBucket() {
        return null;
    }

    /**
     * Returns a table to back our hash table. As per the comment
     * above, this table can be an array of Collection objects.
     *
     * BE SURE TO CALL THIS FACTORY METHOD WHEN CREATING A TABLE SO
     * THAT ALL BUCKET TYPES ARE OF JAVA.UTIL.COLLECTION
     *
     * @param tableSize the size of the table to create
     */
    private Collection<Node>[] createTable(int tableSize) {
        return null;
    }

    // TODO: Implement the methods of the Map61B Interface below
    // Your code won't compile until you do so!

}
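
As a rough, self-contained sketch of the design those comments describe — with assumed names (MiniMapSketch, the resize println) that are not part of the skeleton — put() and get() might lean on the factory method, a floorMod bucket index, and the maxLoad rule like this:

import java.util.Collection;
import java.util.LinkedList;

/**
 * Minimal sketch, not the lab solution. It shows the three ideas the skeleton's
 * comments describe: a createBucket() factory, a floorMod bucket index, and the
 * (# items / # buckets) <= maxLoad rule.
 */
public class MiniMapSketch<K, V> {
    private static class Node<K, V> {
        K key;
        V value;
        Node(K k, V v) { key = k; value = v; }
    }

    private final Collection<Node<K, V>>[] buckets;
    private final double maxLoad;
    private int size = 0;

    @SuppressWarnings("unchecked")
    public MiniMapSketch(int initialSize, double maxLoad) {
        this.maxLoad = maxLoad;
        buckets = (Collection<Node<K, V>>[]) new Collection[initialSize];
        for (int i = 0; i < initialSize; i += 1) {
            buckets[i] = createBucket();           // always go through the factory
        }
    }

    /** Factory method: a subclass could return any other java.util.Collection. */
    protected Collection<Node<K, V>> createBucket() {
        return new LinkedList<>();
    }

    public void put(K key, V value) {
        int index = Math.floorMod(key.hashCode(), buckets.length);  // non-negative index
        for (Node<K, V> n : buckets[index]) {
            if (n.key.equals(key)) {
                n.value = value;                   // existing key: replace the old value
                return;
            }
        }
        buckets[index].add(new Node<>(key, value));
        size += 1;
        if ((double) size / buckets.length > maxLoad) {
            // A real implementation would grow the table here (e.g. double the
            // number of buckets and re-insert every node); omitted in this sketch.
            System.out.println("load factor exceeded; resize would happen here");
        }
    }

    public V get(K key) {
        int index = Math.floorMod(key.hashCode(), buckets.length);
        for (Node<K, V> n : buckets[index]) {
            if (n.key.equals(key)) {
                return n.value;
            }
        }
        return null;
    }
}

Because the bucket type is only ever treated as java.util.Collection, a subclass can override createBucket() to return an ArrayList, HashSet, LinkedList, TreeSet, or PriorityQueue — exactly what the MyHashMap*Buckets classes below do — without changing put() or get().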
45 changes: 45 additions & 0 deletions lab8/hashmap/MyHashMapALBuckets.java
@@ -0,0 +1,45 @@
package hashmap;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Hash Table with ArrayList buckets
 * @author Neil Kulkarni
 */
public class MyHashMapALBuckets<K, V> extends MyHashMap<K, V> {

    /**
     * Constructor that creates a backing array with default
     * initial size and load factor
     */
    public MyHashMapALBuckets() {
        super();
    }

    /**
     * Constructor that creates a backing array of initialSize
     * and default load factor
     *
     * @param initialSize initial size of backing array
     */
    public MyHashMapALBuckets(int initialSize) {
        super(initialSize);
    }

    /**
     * Constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMapALBuckets(int initialSize, double maxLoad) {
        super(initialSize, maxLoad);
    }

    @Override
    protected Collection<Node> createBucket() {
        return new ArrayList<>();
    }
}
45 changes: 45 additions & 0 deletions lab8/hashmap/MyHashMapHSBuckets.java
@@ -0,0 +1,45 @@
package hashmap;

import java.util.HashSet;
import java.util.Collection;

/**
 * Hash Table with buckets that are Hash Sets (trippy!)
 * @author Neil Kulkarni
 */
public class MyHashMapHSBuckets<K, V> extends MyHashMap<K, V> {

    /**
     * Constructor that creates a backing array with default
     * initial size and load factor
     */
    public MyHashMapHSBuckets() {
        super();
    }

    /**
     * Constructor that creates a backing array of initialSize
     * and default load factor
     *
     * @param initialSize initial size of backing array
     */
    public MyHashMapHSBuckets(int initialSize) {
        super(initialSize);
    }

    /**
     * Constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMapHSBuckets(int initialSize, double maxLoad) {
        super(initialSize, maxLoad);
    }

    @Override
    protected Collection<Node> createBucket() {
        return new HashSet<>();
    }
}
45 changes: 45 additions & 0 deletions lab8/hashmap/MyHashMapLLBuckets.java
@@ -0,0 +1,45 @@
package hashmap;

import java.util.LinkedList;
import java.util.Collection;

/**
 * Hash Table with LinkedList buckets
 * @author Neil Kulkarni
 */
public class MyHashMapLLBuckets<K, V> extends MyHashMap<K, V> {

    /**
     * Constructor that creates a backing array with default
     * initial size and load factor
     */
    public MyHashMapLLBuckets() {
        super();
    }

    /**
     * Constructor that creates a backing array of initialSize
     * and default load factor
     *
     * @param initialSize initial size of backing array
     */
    public MyHashMapLLBuckets(int initialSize) {
        super(initialSize);
    }

    /**
     * Constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMapLLBuckets(int initialSize, double maxLoad) {
        super(initialSize, maxLoad);
    }

    @Override
    protected Collection<Node> createBucket() {
        return new LinkedList<>();
    }
}
56 changes: 56 additions & 0 deletions lab8/hashmap/MyHashMapPQBuckets.java
@@ -0,0 +1,56 @@
package hashmap;

import java.util.Collection;
import java.util.Comparator;
import java.util.PriorityQueue;

/**
 * Hash Table with PriorityQueue buckets.
 * Elements of priority queues need to be comparable, so we restrict our map to
 * only allow comparable keys.
 *
 * @author Neil Kulkarni
 */
public class MyHashMapPQBuckets<K extends Comparable<K>, V> extends MyHashMap<K, V> {

    /**
     * Constructor that creates a backing array with default
     * initial size and load factor
     */
    public MyHashMapPQBuckets() {
        super();
    }

    /**
     * Constructor that creates a backing array of initialSize
     * and default load factor
     *
     * @param initialSize initial size of backing array
     */
    public MyHashMapPQBuckets(int initialSize) {
        super(initialSize);
    }

    /**
     * Constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMapPQBuckets(int initialSize, double maxLoad) {
        super(initialSize, maxLoad);
    }

    @Override
    protected Collection<Node> createBucket() {
        // This is fancy new-fangled Java that says, in plain English:
        //
        // "Build a PriorityQueue of Nodes, and when you compare two Nodes,
        // compare their keys using the keys' compareTo method."
        //
        // Remember, we had K extends Comparable<K> in our class header,
        // so we know the keys have implemented a compareTo method.
        return new PriorityQueue<>(Comparator.comparing(a -> a.key));
    }
}
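
Comparator.comparing, used in createBucket() above, builds a Comparator from a key-extracting function. A self-contained illustration with plain strings (made-up values, not the lab's Node class):

import java.util.Comparator;
import java.util.PriorityQueue;

public class ComparingDemo {
    public static void main(String[] args) {
        // Comparator.comparing turns a key extractor into a Comparator:
        // here, compare strings by their length rather than alphabetically.
        PriorityQueue<String> pq = new PriorityQueue<>(Comparator.comparing(String::length));
        pq.add("bananas");
        pq.add("fig");
        pq.add("apple");
        while (!pq.isEmpty()) {
            System.out.println(pq.poll());   // prints fig, apple, bananas (shortest first)
        }
    }
}

The same idea drives the TreeSet buckets in the next file, where the extracted key is a Node's key field.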
56 changes: 56 additions & 0 deletions lab8/hashmap/MyHashMapTSBuckets.java
@@ -0,0 +1,56 @@
package hashmap;

import java.util.Collection;
import java.util.Comparator;
import java.util.TreeSet;

/**
 * Hash Table with TreeSet buckets.
 * Elements of tree sets need to be comparable, so we restrict our map to
 * only allow comparable keys.
 *
 * @author Neil Kulkarni
 */
public class MyHashMapTSBuckets<K extends Comparable<K>, V> extends MyHashMap<K, V> {

    /**
     * Constructor that creates a backing array with default
     * initial size and load factor
     */
    public MyHashMapTSBuckets() {
        super();
    }

    /**
     * Constructor that creates a backing array of initialSize
     * and default load factor
     *
     * @param initialSize initial size of backing array
     */
    public MyHashMapTSBuckets(int initialSize) {
        super(initialSize);
    }

    /**
     * Constructor that creates a backing array of initialSize.
     * The load factor (# items / # buckets) should always be <= maxLoad.
     *
     * @param initialSize initial size of backing array
     * @param maxLoad maximum load factor
     */
    public MyHashMapTSBuckets(int initialSize, double maxLoad) {
        super(initialSize, maxLoad);
    }

    @Override
    protected Collection<Node> createBucket() {
        // This is fancy new-fangled Java that says, in plain English:
        //
        // "Build a TreeSet of Nodes, and when you compare two Nodes,
        // compare their keys using the keys' compareTo method."
        //
        // Remember, we had K extends Comparable<K> in our class header,
        // so we know the keys have implemented a compareTo method.
        return new TreeSet<>(Comparator.comparing(a -> a.key));
    }
}